[ 482.585260] env[62730]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=62730) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 482.585724] env[62730]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=62730) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 482.585724] env[62730]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=62730) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 482.586065] env[62730]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 482.682405] env[62730]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62730) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 482.692794] env[62730]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62730) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 482.839447] env[62730]: INFO nova.virt.driver [None req-619b4c16-db4a-4c4e-aa1c-3040cb3e5dd7 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 482.912213] env[62730]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 482.912431] env[62730]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 482.912526] env[62730]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62730) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 486.073782] env[62730]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-ad8a4d73-8861-41f0-92df-0a09ae032287 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.089676] env[62730]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62730) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 486.089817] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-e0779020-41c0-4cd8-bd8c-b6a96c2aa0a1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.124177] env[62730]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 64eea.
[ 486.124349] env[62730]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.212s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 486.124921] env[62730]: INFO nova.virt.vmwareapi.driver [None req-619b4c16-db4a-4c4e-aa1c-3040cb3e5dd7 None None] VMware vCenter version: 7.0.3
[ 486.128602] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e37bc08-c2af-4908-a55a-fa5f657fdbbf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.146808] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f725a94b-7b37-4a31-83eb-4054dee40189 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.153405] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5df4b70-2c68-447e-b726-f3245801acbe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.160777] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d155367d-68b8-4934-a29c-c5a21e93a181 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.174877] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d195236-d48c-43d3-b3b7-0dba0b8f2d7b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.181365] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf61338-58fd-4b90-aa76-70440602bc0b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.212175] env[62730]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e7b7a8ad-4eee-4818-9334-54986ff9d365 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 486.218042] env[62730]: DEBUG nova.virt.vmwareapi.driver [None req-619b4c16-db4a-4c4e-aa1c-3040cb3e5dd7 None None] Extension org.openstack.compute already exists. {{(pid=62730) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 486.220781] env[62730]: INFO nova.compute.provider_config [None req-619b4c16-db4a-4c4e-aa1c-3040cb3e5dd7 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 486.238711] env[62730]: DEBUG nova.context [None req-619b4c16-db4a-4c4e-aa1c-3040cb3e5dd7 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),c681b0c2-acdb-49a5-9d48-188cdce905d0(cell1) {{(pid=62730) load_cells /opt/stack/nova/nova/context.py:464}}
[ 486.240784] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 486.241046] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 486.241752] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 486.242214] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Acquiring lock "c681b0c2-acdb-49a5-9d48-188cdce905d0" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 486.242422] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Lock "c681b0c2-acdb-49a5-9d48-188cdce905d0" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 486.243471] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Lock "c681b0c2-acdb-49a5-9d48-188cdce905d0" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 486.264898] env[62730]: INFO dbcounter [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Registered counter for database nova_cell0
[ 486.273200] env[62730]: INFO dbcounter [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Registered counter for database nova_cell1
[ 486.276470] env[62730]: DEBUG oslo_db.sqlalchemy.engines [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62730) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 486.276858] env[62730]: DEBUG oslo_db.sqlalchemy.engines [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62730) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 486.281560] env[62730]: DEBUG dbcounter [-] [62730] Writer thread running {{(pid=62730) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 486.282353] env[62730]: DEBUG dbcounter [-] [62730] Writer thread running {{(pid=62730) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 486.284507] env[62730]: ERROR nova.db.main.api [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 486.284507] env[62730]:     result = function(*args, **kwargs)
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 486.284507] env[62730]:     return func(*args, **kwargs)
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 486.284507] env[62730]:     result = fn(*args, **kwargs)
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 486.284507] env[62730]:     return f(*args, **kwargs)
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 486.284507] env[62730]:     return db.service_get_minimum_version(context, binaries)
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 486.284507] env[62730]:     _check_db_access()
[ 486.284507] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 486.284507] env[62730]:     stacktrace = ''.join(traceback.format_stack())
[ 486.284507] env[62730]:
[ 486.285575] env[62730]: ERROR nova.db.main.api [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 486.285575] env[62730]:     result = function(*args, **kwargs)
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 486.285575] env[62730]:     return func(*args, **kwargs)
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 486.285575] env[62730]:     result = fn(*args, **kwargs)
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 486.285575] env[62730]:     return f(*args, **kwargs)
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 486.285575] env[62730]:     return db.service_get_minimum_version(context, binaries)
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 486.285575] env[62730]:     _check_db_access()
[ 486.285575] env[62730]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 486.285575] env[62730]:     stacktrace = ''.join(traceback.format_stack())
[ 486.285575] env[62730]:
[ 486.285983] env[62730]: WARNING nova.objects.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 486.286104] env[62730]: WARNING nova.objects.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Failed to get minimum service version for cell c681b0c2-acdb-49a5-9d48-188cdce905d0
[ 486.286580] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Acquiring lock "singleton_lock" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 486.286747] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Acquired lock "singleton_lock" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 486.286996] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Releasing lock "singleton_lock" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 486.287336] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Full set of CONF: {{(pid=62730) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 486.287504] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ******************************************************************************** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 486.287654] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] Configuration options gathered from: {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 486.287796] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 486.287994] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 486.288136] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ================================================================================ {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 486.288353] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] allow_resize_to_same_host = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.288527] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] arq_binding_timeout = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.288656] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] backdoor_port = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.288863] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] backdoor_socket = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.288998] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] block_device_allocate_retries = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.289183] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] block_device_allocate_retries_interval = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.289356] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cert = self.pem {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.289522] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.289692] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute_monitors = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.289886] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] config_dir = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290049] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] config_drive_format = iso9660 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290219] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290436] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] config_source = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290614] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] console_host = devstack {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] control_exchange = nova {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.290944] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cpu_allocation_ratio = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.291119] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] daemon = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.291310] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] debug = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.291466] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_access_ip_network_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.291668] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_availability_zone = nova {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.291839] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_ephemeral_format = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292010] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_green_pool_size = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292263] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292431] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] default_schedule_zone = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292590] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] disk_allocation_ratio = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292753] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] enable_new_services = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.292930] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] enabled_apis = ['osapi_compute'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293128] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] enabled_ssl_apis = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293303] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] flat_injected = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293465] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] force_config_drive = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293624] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] force_raw_images = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293793] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] graceful_shutdown_timeout = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.293958] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] heal_instance_info_cache_interval = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.294193] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] host = cpu-1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.294411] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.294600] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.294766] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.294978] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.295157] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_build_timeout = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.295324] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_delete_interval = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.295495] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_format = [instance: %(uuid)s] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.295667] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_name_template = instance-%08x {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.295834] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_usage_audit = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296016] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_usage_audit_period = month {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296191] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296360] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296530] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] internal_service_availability_zone = internal {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296687] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] key = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.296852] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] live_migration_retry_count = 30 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297071] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_config_append = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297257] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297425] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_dir = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297587] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297717] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_options = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.297881] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_rotate_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298065] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_rotate_interval_type = days {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298238] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] log_rotation_type = none {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298401] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298596] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298715] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.298888] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299029] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299204] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] long_rpc_timeout = 1800 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299366] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_concurrent_builds = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299526] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_concurrent_live_migrations = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299717] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_concurrent_snapshots = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.299888] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_local_block_devices = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300058] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_logfile_count = 30 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300220] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] max_logfile_size_mb = 200 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300379] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] maximum_instance_delete_attempts = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300549] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metadata_listen = 0.0.0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300720] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metadata_listen_port = 8775 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.300890] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metadata_workers = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301099] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] migrate_max_retries = -1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301293] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] mkisofs_cmd = genisoimage {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301504] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301640] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] my_ip = 10.180.1.21 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301802] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] network_allocate_retries = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.301981] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.302204] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.302334] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] osapi_compute_listen_port = 8774 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.302546] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] osapi_compute_unique_server_name_scope = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.302725] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] osapi_compute_workers = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.302892] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] password_length = 12 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303066] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] periodic_enable = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303230] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] periodic_fuzzy_delay = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303400] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] pointer_model = usbtablet {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303568] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] preallocate_images = none {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303728] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] publish_errors = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.303899] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] pybasedir = /opt/stack/nova {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304080] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ram_allocation_ratio = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304248] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rate_limit_burst = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304415] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rate_limit_except_level = CRITICAL {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304573] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rate_limit_interval = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304734] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reboot_timeout = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.304894] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reclaim_instance_interval = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305064] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] record = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305257] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reimage_timeout_per_gb = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305439] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] report_interval = 120 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305605] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rescue_timeout = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305765] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reserved_host_cpus = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.305926] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reserved_host_disk_mb = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306096] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reserved_host_memory_mb = 512 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306261] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] reserved_huge_pages = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] resize_confirm_window = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306624] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] resize_fs_using_block_device = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306803] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] resume_guests_state_on_host_boot = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.306973] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.307150] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] rpc_response_timeout = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.307314] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] run_external_periodic_tasks = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.307486] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] running_deleted_instance_action = reap {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.307648] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.307807] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] running_deleted_instance_timeout = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308007] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler_instance_sync_interval = 120 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308198] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_down_time = 720 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308370] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] servicegroup_driver = db {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308586] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] shelved_offload_time = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308751] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] shelved_poll_interval = 3600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.308917] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] shutdown_timeout = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.309088] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] source_is_ipv6 = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.309301] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ssl_only = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.309572] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.309746] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] sync_power_state_interval = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.309908] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] sync_power_state_pool_size = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310090] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] syslog_log_facility = LOG_USER {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310255] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] tempdir = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310418] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] timeout_nbd = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310620] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] transport_url = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310803] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] update_resources_interval = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.310966] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_cow_images = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.311140] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_eventlog = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.311323] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_journal = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.311489] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_json = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.311647] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_rootwrap_daemon = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.311807] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_stderr = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312011] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] use_syslog = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312199] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vcpu_pin_set = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312373] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plugging_is_fatal = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312541] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plugging_timeout = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312706] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] virt_mkfs = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.312868] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] volume_usage_poll_interval = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.313040] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] watch_log_file = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.313212] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] web = /usr/share/spice-html5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 486.313448] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_concurrency.disable_process_locking = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.313747] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.313928] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.314108] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.314285] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.314456] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.314622] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.314877] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.auth_strategy = keystone {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315073] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.compute_link_prefix = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315259] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315436] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.dhcp_domain = novalocal {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315610] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.enable_instance_password = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315775] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.glance_link_prefix = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.315949] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.316168] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.316350] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.instance_list_per_project_cells = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.316518] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.list_records_by_skipping_down_cells = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.316681] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.local_metadata_per_cell = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.316854] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.max_limit = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317035] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.metadata_cache_expiration = 15 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317222] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.neutron_default_tenant_id = default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317403] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.use_neutron_default_nets = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317597] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317766] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.317937] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.318128] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.318303] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_dynamic_targets = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.318474] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_jsonfile_path = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.318662] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 486.318902] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.backend = dogpile.cache.memcached {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
None None] cache.backend = dogpile.cache.memcached {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319093] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.backend_argument = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319296] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.config_prefix = cache.oslo {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319482] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.dead_timeout = 60.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319653] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.debug_cache_backend = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319820] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.enable_retry_client = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.319985] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.enable_socket_keepalive = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.320200] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.enabled = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.320382] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.enforce_fips_mode = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.320551] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.expiration_time = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.320716] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.hashclient_retry_attempts = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.320885] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321070] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_dead_retry = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321260] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_password = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321469] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62730) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321653] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321822] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_pool_maxsize = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.321989] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.322174] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_sasl_enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.322360] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.322535] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.322702] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.memcache_username = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.322875] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.proxies = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323050] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_password = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323229] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323419] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323593] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_server = localhost:6379 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323762] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_socket_timeout = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.323956] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.redis_username = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324155] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.retry_attempts = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324333] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.retry_delay = 0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324500] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.socket_keepalive_count = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324664] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.socket_keepalive_idle = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324827] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.socket_keepalive_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.324988] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.tls_allowed_ciphers = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325447] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.tls_cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325447] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.tls_certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325520] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.tls_enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325635] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cache.tls_keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325806] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.325982] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.auth_type = password {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.326160] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.326343] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.326507] env[62730]: DEBUG oslo_service.service 
[None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.326719] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.326901] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.cross_az_attach = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327082] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.debug = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327252] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.endpoint_template = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.http_retries = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327589] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327750] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.327926] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.os_region_name = RegionOne {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328117] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328282] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cinder.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328458] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328624] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.cpu_dedicated_set = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.cpu_shared_set = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.328952] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.image_type_exclude_list = [] {{(pid=62730) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.329131] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.329340] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.329529] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.329697] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.329871] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330049] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.resource_provider_association_refresh = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330219] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330387] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.shutdown_retry_interval = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330572] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330786] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] conductor.workers = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.330974] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] console.allowed_origins = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.331156] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] console.ssl_ciphers = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.331338] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] console.ssl_minimum_version = default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.331509] env[62730]: DEBUG oslo_service.service [None 
req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] consoleauth.enforce_session_timeout = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.331682] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] consoleauth.token_ttl = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.331851] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332050] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332236] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332401] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332563] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332723] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.332888] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333060] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333231] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333660] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333660] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333760] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.333907] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.service_name = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334093] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.service_type = accelerator {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334266] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334429] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334597] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334791] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.334983] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.335161] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] cyborg.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.335384] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.backend = sqlalchemy {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.335570] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.connection = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.335758] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.connection_debug = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.335951] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.connection_parameters = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.336151] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.connection_recycle_time = 3600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.336749] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.connection_trace = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.336749] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.db_inc_retry_interval = True {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.336749] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.db_max_retries = 20 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.336904] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.db_max_retry_interval = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.337120] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.db_retry_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.337339] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.max_overflow = 50 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.337532] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.max_pool_size = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.337707] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.max_retries = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.337883] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338057] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.mysql_wsrep_sync_wait = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338228] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.pool_timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338396] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.retry_interval = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338558] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.slave_connection = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338724] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.sqlite_synchronous = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.338890] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] database.use_db_reconnect = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.339082] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.backend = sqlalchemy {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
486.339259] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.connection = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.339430] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.connection_debug = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.339603] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.connection_parameters = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.339768] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.connection_recycle_time = 3600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.339935] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.connection_trace = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340120] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.db_inc_retry_interval = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340292] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.db_max_retries = 20 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340460] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.db_max_retry_interval = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340627] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.db_retry_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340789] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.max_overflow = 50 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.340952] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.max_pool_size = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341126] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.max_retries = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341305] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341466] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341628] 
env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.pool_timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341792] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.retry_interval = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.341953] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.slave_connection = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342129] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] api_database.sqlite_synchronous = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342307] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] devices.enabled_mdev_types = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342485] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342658] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342822] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ephemeral_storage_encryption.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.342984] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343169] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.api_servers = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343336] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343502] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343669] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343826] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.343984] env[62730]: DEBUG 
oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344162] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.debug = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344330] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.default_trusted_certificate_ids = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344493] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.enable_certificate_validation = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344656] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.enable_rbd_download = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344816] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.344983] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345160] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345323] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345483] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345646] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.num_retries = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345817] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.rbd_ceph_conf = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.345980] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.rbd_connect_timeout = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346164] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.rbd_pool = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346331] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.rbd_user = {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346495] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346657] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346816] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.346984] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.service_type = image {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347162] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347325] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347485] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347643] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347825] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.347990] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.verify_glance_signatures = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.348166] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] glance.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.348336] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] guestfs.debug = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.348512] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] mks.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.348874] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349077] 
env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.manager_interval = 2400 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349254] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.precache_concurrency = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349426] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.remove_unused_base_images = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349594] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349765] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.349941] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] image_cache.subdirectory_name = _base {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350133] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.api_max_retries = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350303] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.api_retry_interval = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350466] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350628] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.auth_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350789] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.350947] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351124] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351295] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.conductor_group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351457] env[62730]: DEBUG 
oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351616] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351773] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.351937] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352109] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352271] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352428] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352591] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.peer_list = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352746] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.352903] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353075] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.serial_console_state_timeout = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353238] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353410] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.service_type = baremetal {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353570] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.shard = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353734] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.split_loggers = False {{(pid=62730) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.353894] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354062] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354226] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354410] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354572] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ironic.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354755] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.354929] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] key_manager.fixed_key = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355128] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355295] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.barbican_api_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355455] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.barbican_endpoint = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355624] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.barbican_endpoint_type = public {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.barbican_region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.355944] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356119] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.certfile = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356287] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356451] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356609] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356774] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.number_of_retries = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.356936] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.retry_delay = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357110] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.send_service_user_token = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357277] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357435] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357597] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.verify_ssl = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357757] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican.verify_ssl_path = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.357925] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.358113] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.auth_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.358282] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.358441] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
486.358607] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.358769] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.358929] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359105] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359312] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] barbican_service_user.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359437] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.approle_role_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359597] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.approle_secret_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359758] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.359917] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360092] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360265] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360596] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.kv_mountpoint = secret {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360758] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.kv_path = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.360924] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None 
None] vault.kv_version = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361092] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.namespace = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361263] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.root_token_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361427] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361587] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.ssl_ca_crt_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361747] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.361912] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.use_ssl = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362094] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362270] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362438] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.auth_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362598] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362758] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.362923] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.363094] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.363257] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
486.363416] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.363580] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.363739] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.363895] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364062] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364224] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364386] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364544] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364712] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.service_type = identity {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.364875] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365043] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365209] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365367] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365549] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365708] env[62730]: DEBUG oslo_service.service [None 
req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] keystone.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.365910] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.connection_uri = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366084] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_mode = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366257] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366427] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_models = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366611] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_power_governor_high = performance {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.366949] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_power_management = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367137] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367309] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.device_detach_attempts = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367472] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.device_detach_timeout = 20 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367638] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.disk_cachemodes = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367798] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.disk_prefix = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.367962] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.enabled_perf_events = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368140] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] 
libvirt.file_backed_memory = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368306] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.gid_maps = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368466] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.hw_disk_discard = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368642] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.hw_machine_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368814] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_rbd_ceph_conf = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.368980] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369157] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369329] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_rbd_glance_store_name = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369503] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_rbd_pool = rbd {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369670] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_type = default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369828] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.images_volume_group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.369990] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.inject_key = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370165] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.inject_partition = -2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370330] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.inject_password = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370491] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.iscsi_iface = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370652] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.iser_use_multipath = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370814] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.370976] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371150] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_downtime = 500 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371325] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371491] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371652] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_inbound_addr = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371814] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.371973] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372151] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_scheme = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372327] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_timeout_action = abort {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372497] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_tunnelled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372658] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.live_migration_uri = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372820] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.372981] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.max_queues = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.373157] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.373395] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.373560] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.nfs_mount_options = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.373858] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374042] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374216] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374383] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374552] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374714] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_pcie_ports = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.374881] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.375059] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.pmem_namespaces = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.375230] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.quobyte_client_cfg = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.375542] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.375718] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.375886] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376063] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376231] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rbd_secret_uuid = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376397] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rbd_user = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376559] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376730] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.376890] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rescue_image_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.377060] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rescue_kernel_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.377227] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rescue_ramdisk_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.377400] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.377561] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.rx_queue_size = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.377730] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.smbfs_mount_options = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378012] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378193] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.snapshot_compression = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378358] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.snapshot_image_format = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378575] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378743] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.sparse_logical_volumes = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.378907] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.swtpm_enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379087] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.swtpm_group = tss {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379263] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.swtpm_user = tss {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379437] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.sysinfo_serial = unique {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379612] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.tb_cache_size = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379753] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.tx_queue_size = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.379919] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.uid_maps = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380092] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.use_virtio_for_bridges = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380267] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.virt_type = kvm {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380437] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.volume_clear = zero 
{{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380601] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.volume_clear_size = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380768] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.volume_use_multipath = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.380928] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_cache_path = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.381109] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.381287] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.381456] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.381625] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.381901] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382089] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.vzstorage_mount_user = stack {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382261] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382440] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382618] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.auth_type = password {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382781] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.382943] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.certfile = None 
{{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383120] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383285] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383447] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383622] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.default_floating_pool = public {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.383950] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.extension_sync_interval = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384128] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.http_retries = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384296] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384459] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384618] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384791] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.384949] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385131] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.ovs_bridge = br-int {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385300] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.physnets = [] {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385475] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.region_name = RegionOne {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385644] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385806] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.service_metadata_proxy = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.385966] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.386150] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.service_type = network {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.386316] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.386479] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.386639] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.386797] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387255] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387255] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] neutron.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387366] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] notifications.bdms_in_notifications = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387487] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] notifications.default_level = INFO {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387665] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] notifications.notification_format = unversioned {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.387830] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] notifications.notify_on_state_change = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388015] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388201] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] pci.alias = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388373] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] pci.device_spec = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388538] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] pci.report_in_placement = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388710] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.388882] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.auth_type = password {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389061] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389228] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389390] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389553] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389744] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.389869] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390035] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.default_domain_id = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390199] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.default_domain_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390359] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.domain_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390514] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.domain_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390675] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390835] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.390994] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391164] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391325] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391494] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.password = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391654] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.project_domain_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391820] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.project_domain_name = Default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.391985] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.project_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392175] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.project_name = service {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392344] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.region_name = RegionOne {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392507] 
env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392666] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392833] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.service_type = placement {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.392996] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393172] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393335] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393496] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.system_scope = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393653] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393811] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.trust_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.393969] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.user_domain_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.394150] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.user_domain_name = Default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.394312] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.user_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.394485] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.username = placement {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.394663] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.394823] env[62730]: DEBUG oslo_service.service [None 
req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] placement.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395018] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.cores = 20 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395183] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.count_usage_from_placement = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395357] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395535] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.injected_file_content_bytes = 10240 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395703] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.injected_file_path_length = 255 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.395871] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.injected_files = 5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396052] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.instances = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396225] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.key_pairs = 100 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396395] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.metadata_items = 128 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396563] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.ram = 51200 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396729] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.recheck_quota = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.396897] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.server_group_members = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397075] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] quota.server_groups = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397254] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397589] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.image_metadata_prefilter = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397752] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.397918] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.max_attempts = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398092] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.max_placement_results = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398261] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398588] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398761] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] scheduler.workers = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.398940] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399124] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399307] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399479] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399644] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399877] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.399965] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.400175] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.400347] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.host_subset_size = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.400514] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.400673] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.400836] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401007] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.isolated_hosts = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401184] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.isolated_images = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401346] env[62730]: DEBUG oslo_service.service [None 
req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401510] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401679] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.401843] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.pci_in_placement = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402013] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402191] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402353] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402512] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402672] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402829] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.402991] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.track_instance_changes = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.403182] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.403354] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metrics.required = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.403518] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metrics.weight_multiplier = 1.0 
{{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.403684] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.403846] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] metrics.weight_setting = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.404183] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.404364] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.404542] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.port_range = 10000:20000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.404713] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.404882] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405060] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] serial_console.serialproxy_port = 6083 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405234] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405412] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.auth_type = password {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405574] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405732] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.405893] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406064] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.insecure = False {{(pid=62730) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406224] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406396] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.send_service_user_token = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406557] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406715] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] service_user.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.406904] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.agent_enabled = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.407078] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.407391] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.407589] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.407762] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.html5proxy_port = 6082 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.407925] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.image_compression = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408099] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.jpeg_compression = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408264] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.playback_compression = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408438] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.server_listen = 127.0.0.1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408605] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408768] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.streaming_mode = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.408927] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] spice.zlib_compression = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409107] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] upgrade_levels.baseapi = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409283] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] upgrade_levels.compute = auto {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409443] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] upgrade_levels.conductor = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409602] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] upgrade_levels.scheduler = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409765] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.409936] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410099] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410259] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410425] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410588] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410746] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.410909] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411076] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vendordata_dynamic_auth.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411266] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.api_retry_count = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411424] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.ca_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411595] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411763] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.cluster_name = testcl1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.411936] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.connection_pool_size = 10 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.412109] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.console_delay_seconds = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.412281] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.datastore_regex = ^datastore.* {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.412494] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.412666] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.host_password = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.412833] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.host_port = 443 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413007] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.host_username = administrator@vsphere.local {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413184] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.insecure = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413350] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.integration_bridge = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413512] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.maximum_objects = 100 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413669] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.pbm_default_policy = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413831] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.pbm_enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.413996] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.pbm_wsdl_location = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.414179] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.414342] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.serial_port_proxy_uri = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.414500] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.serial_port_service_uri = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.414667] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.task_poll_interval = 0.5 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.414837] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.use_linked_clone = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.415027] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.vnc_keymap = en-us {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.415182] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.vnc_port = 5900 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.415345] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vmware.vnc_port_total = 10000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.415530] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.auth_schemes = ['none'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.415701] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416008] env[62730]: 
DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416203] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416376] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.novncproxy_port = 6080 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416551] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.server_listen = 127.0.0.1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416722] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.416885] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.vencrypt_ca_certs = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417049] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.vencrypt_client_cert = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417211] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vnc.vencrypt_client_key = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417390] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417553] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_deep_image_inspection = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417712] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.417874] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418043] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418208] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.disable_rootwrap = False {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418370] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.enable_numa_live_migration = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418531] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418693] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.418854] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419018] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.libvirt_disable_apic = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419186] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419350] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419512] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419673] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419834] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.419995] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.420192] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.420329] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
486.420490] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.420654] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.420838] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421014] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.client_socket_timeout = 900 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421186] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.default_pool_size = 1000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421350] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.keep_alive = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421516] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.max_header_line = 16384 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421678] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421838] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.ssl_ca_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.421997] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.ssl_cert_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.422201] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.ssl_key_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.422354] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.tcp_keepidle = 600 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.422507] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.422672] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] zvm.ca_file = None {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.422834] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] zvm.cloud_connector_url = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.423145] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.423320] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] zvm.reachable_timeout = 300 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.423537] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.enforce_new_defaults = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.423690] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.enforce_scope = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.423865] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.policy_default_rule = default {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424058] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424239] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.policy_file = policy.yaml {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424414] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424581] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424737] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.424894] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.425069] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
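
Every entry in this dump comes from a single oslo.config mechanism: `ConfigOpts.log_opt_values()` (the `log_opt_values ... cfg.py:2620` suffix on each line) walks every registered option group and logs `group.option = value` at DEBUG, rendering options registered with `secret=True` as `****` (as seen with `vmware.host_password` and `profiler.hmac_keys` in this dump). A minimal sketch of the same mechanism using stock oslo.config; the option names below are illustrative stand-ins, not Nova's real registrations:

```python
import logging

from oslo_config import cfg

CONF = cfg.ConfigOpts()
# Illustrative options only; nova registers hundreds of these itself.
CONF.register_opts(
    [cfg.StrOpt('policy_file', default='policy.yaml'),
     cfg.BoolOpt('enforce_scope', default=True)],
    group='oslo_policy')
CONF.register_opts(
    [cfg.StrOpt('host_password', secret=True)],  # dumped as ****
    group='vmware')

CONF([])  # parse an (empty) command line so the config is usable
CONF.set_override('host_password', 'not-shown', group='vmware')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('oslo_service.service')
# Emits one "group.option = value" line per option, like the dump above.
CONF.log_opt_values(LOG, logging.DEBUG)
```
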
[ 486.425245] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.425425] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.425603] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.connection_string = messaging:// {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.425770] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.enabled = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.425939] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.es_doc_type = notification {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426116] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.es_scroll_size = 10000 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426288] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.es_scroll_time = 2m {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426453] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.filter_error_trace = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426620] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.hmac_keys = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426787] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.sentinel_service_name = mymaster {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.426951] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.socket_timeout = 0.1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427127] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.trace_requests = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427319] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler.trace_sqlalchemy = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427535] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler_jaeger.process_tags = {} {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427665] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None]
profiler_jaeger.service_name_prefix = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427795] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] profiler_otlp.service_name_prefix = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.427965] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] remote_debug.host = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428141] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] remote_debug.port = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428325] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428490] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428656] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428818] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.428980] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429167] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429320] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429486] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429648] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429817] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.429975] env[62730]: 
DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430158] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430329] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430493] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430665] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430834] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.430997] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.431187] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.431350] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.431515] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.431679] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.431843] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432016] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432196] env[62730]: DEBUG oslo_service.service [None 
req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432359] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432520] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432682] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.432844] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433017] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433188] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433365] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433535] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433698] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.433869] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.434050] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.434217] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
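
The `[oslo_messaging_rabbit]` values above are essentially library defaults: quorum and stream queues disabled, heartbeats governed by `heartbeat_timeout_threshold = 60` and `heartbeat_rate = 3` (i.e. a liveness check roughly every 60 / 3 = 20 s), and `round-robin` failover. In a deployment these are set in the `[oslo_messaging_rabbit]` section of nova.conf; the same knobs can be exercised programmatically with oslo.config overrides, as in this hedged sketch (the re-registration below is illustrative; oslo.messaging normally registers these options itself when the transport loads):

```python
from oslo_config import cfg

CONF = cfg.ConfigOpts()
# Re-declare a few options from the dump; normally oslo.messaging does this.
CONF.register_opts(
    [cfg.BoolOpt('rabbit_quorum_queue', default=False),
     cfg.IntOpt('heartbeat_timeout_threshold', default=60),
     cfg.IntOpt('heartbeat_rate', default=3)],
    group='oslo_messaging_rabbit')
CONF([])

# Equivalent to editing the [oslo_messaging_rabbit] section of nova.conf.
CONF.set_override('rabbit_quorum_queue', True, group='oslo_messaging_rabbit')

rabbit = CONF.oslo_messaging_rabbit
interval = rabbit.heartbeat_timeout_threshold / rabbit.heartbeat_rate
print(rabbit.rabbit_quorum_queue, interval)  # True 20.0
```
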
[ 486.434412] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.434583] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_notifications.retry = -1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.434766] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.434940] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435127] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.auth_section = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435293] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.auth_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435452] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.cafile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435606] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.certfile = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435767] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.collect_timing = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.435922] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.connect_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436088] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.connect_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436248] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.endpoint_id = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436408] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.endpoint_override = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436568] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.insecure = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436722] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.keyfile = None {{(pid=62730) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.436878] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.max_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437041] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.min_version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437202] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.region_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437365] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.retriable_status_codes = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437529] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.service_name = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437686] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.service_type = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.437848] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.split_loggers = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438014] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.status_code_retries = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438185] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.status_code_retry_delay = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438345] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.timeout = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438505] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.valid_interfaces = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438662] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_limit.version = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438830] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_reports.file_event_handler = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.438997] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62730) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.439172] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] oslo_reports.log_dir = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.439348] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.439511] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.439673] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.439840] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440013] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440179] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440352] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440515] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440673] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.440839] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.441008] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.441176] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] vif_plug_ovs_privileged.user = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
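
The privsep capability lists above are plain Linux capability numbers: assuming the standard numbering from <linux/capability.h>, `[12]` for the linux_bridge plugin is CAP_NET_ADMIN, and `[12, 1]` for the OVS plugin adds CAP_DAC_OVERRIDE. A small decoding sketch (the name table is written out by hand here, not read from the kernel):

```python
# Subset of <linux/capability.h>; only the numbers that appear above.
CAP_NAMES = {
    1: 'CAP_DAC_OVERRIDE',
    12: 'CAP_NET_ADMIN',
}

def decode_caps(caps):
    """Map privsep capability numbers to symbolic names."""
    return [CAP_NAMES.get(cap, 'CAP_%d' % cap) for cap in caps]

print(decode_caps([12]))     # vif_plug_linux_bridge_privileged
print(decode_caps([12, 1]))  # vif_plug_ovs_privileged
```
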
[None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.441534] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.441711] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.441883] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442065] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442240] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442408] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442573] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442754] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.442925] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.isolate_vif = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.443113] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.443282] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.443452] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.443620] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
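
[annotation] The long run of `group.option = value` lines above and below is oslo.config's startup dump: at service launch, ConfigOpts.log_opt_values() walks every registered option group (vif_plug_linux_bridge_privileged, os_vif_ovs, privsep_osbrick, and so on) and logs one DEBUG line per option, which is why each carries the same `log_opt_values .../oslo_config/cfg.py:2620` tail. A minimal sketch of that mechanism outside Nova, with a group and option names borrowed from the log for illustration:

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF
    # Register a couple of options in an 'os_vif_ovs' group, mirroring
    # the names seen in the dump above.
    CONF.register_opts(
        [cfg.IntOpt('network_device_mtu', default=1500),
         cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640')],
        group='os_vif_ovs')

    CONF([], project='demo')                 # parse (empty) CLI/config sources
    CONF.log_opt_values(LOG, logging.DEBUG)  # emits one DEBUG line per option

Options printed as `None` in the dump appear to be defaults that were never overridden in nova.conf.
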
[ 486.443784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_vif_ovs.per_port_bridge = False {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.443950] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_brick.lock_path = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444130] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444295] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444468] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.capabilities = [21] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444627] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444784] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.helper_command = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.444947] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445127] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445288] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] privsep_osbrick.user = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445462] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445620] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.group = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445777] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.helper_command = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.445940] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
486.446114] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.446274] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] nova_sys_admin.user = None {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 486.446406] env[62730]: DEBUG oslo_service.service [None req-ebc43de2-2be7-40fd-959a-46fd2fe07ba4 None None] ******************************************************************************** {{(pid=62730) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 486.446838] env[62730]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 486.457684] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Getting list of instances from cluster (obj){ [ 486.457684] env[62730]: value = "domain-c8" [ 486.457684] env[62730]: _type = "ClusterComputeResource" [ 486.457684] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 486.458962] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8327241c-55d5-407a-927f-6b77d33921d3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.468303] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Got total of 0 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 486.468878] env[62730]: WARNING nova.virt.vmwareapi.driver [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 486.469359] env[62730]: INFO nova.virt.node [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Generated node identity 5ad8d442-72d6-4045-82dd-b3c7e74880a7 [ 486.469595] env[62730]: INFO nova.virt.node [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Wrote node identity 5ad8d442-72d6-4045-82dd-b3c7e74880a7 to /opt/stack/data/n-cpu-1/compute_id [ 486.484274] env[62730]: WARNING nova.compute.manager [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Compute nodes ['5ad8d442-72d6-4045-82dd-b3c7e74880a7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 486.518735] env[62730]: INFO nova.compute.manager [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 486.544480] env[62730]: WARNING nova.compute.manager [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
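
[annotation] The `Acquiring lock "compute_resources" ... / Lock ... acquired ... waited 0.001s / Lock ... "released" ... held 0.000s` triplets that follow are emitted by oslo.concurrency's lock wrapper (lockutils.py:402/407/421 in the traces): the resource tracker serializes its audit and claim paths on a named internal semaphore. A minimal sketch of the pattern, assuming the decorator form; the function name and body here are stand-ins, not Nova's code:

    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Critical section: audit host resources, update inventory, etc.
        # While this runs, any other caller decorated with the same lock
        # name blocks, and the wrapper logs acquire/wait/hold times.
        pass

    update_available_resource()
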
[ 486.544958] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 486.545216] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 486.545374] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 486.545533] env[62730]: DEBUG nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 486.546617] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f5fb69-9210-49b7-8904-c01e8fb098cc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.555357] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8809942-ff05-4105-913b-eab6560a6fda {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.569566] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66879eed-a91f-4999-8557-3d4e6e890d49 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.576090] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ccee85-7066-423a-adad-bce0aeae1f2f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.606735] env[62730]: DEBUG nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180556MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 486.606897] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 486.607105] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 486.619534] env[62730]: WARNING 
nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] No compute node record for cpu-1:5ad8d442-72d6-4045-82dd-b3c7e74880a7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 5ad8d442-72d6-4045-82dd-b3c7e74880a7 could not be found. [ 486.632737] env[62730]: INFO nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 [ 486.689542] env[62730]: DEBUG nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 486.689748] env[62730]: DEBUG nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 486.797303] env[62730]: INFO nova.scheduler.client.report [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] [req-00391d5b-40ea-4b73-bd9d-6257bda1d399] Created resource provider record via placement API for resource provider with UUID 5ad8d442-72d6-4045-82dd-b3c7e74880a7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 486.814863] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabf7189-83cf-422c-9da0-c6430adb2bee {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.822702] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce31ffd6-a4f5-4e9a-8c1c-586abd486f36 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.852254] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff2ede4-eaa4-49b9-89d2-a37deac47fdb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.860199] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724639e4-92fc-4d1b-8610-fb6c8f32c2a2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.875036] env[62730]: DEBUG nova.compute.provider_tree [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 486.917251] env[62730]: DEBUG nova.scheduler.client.report [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Updated inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 486.917499] env[62730]: DEBUG nova.compute.provider_tree [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Updating resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 generation from 0 to 1 during operation: update_inventory {{(pid=62730) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 486.917656] env[62730]: DEBUG nova.compute.provider_tree [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 486.967985] env[62730]: DEBUG nova.compute.provider_tree [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Updating resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 generation from 1 to 2 during operation: update_traits {{(pid=62730) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 486.986058] env[62730]: DEBUG nova.compute.resource_tracker [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 486.986259] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.379s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 486.986426] env[62730]: DEBUG nova.service [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Creating RPC server for service compute {{(pid=62730) start /opt/stack/nova/nova/service.py:182}} [ 487.001282] env[62730]: DEBUG nova.service [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] Join ServiceGroup membership for this service compute {{(pid=62730) start /opt/stack/nova/nova/service.py:199}} [ 487.001474] env[62730]: DEBUG nova.servicegroup.drivers.db [None req-2975ba00-29d8-4a58-bc28-00a5fd735d46 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62730) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 494.004041] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 494.014807] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 494.014807] 
env[62730]: value = "domain-c8" [ 494.014807] env[62730]: _type = "ClusterComputeResource" [ 494.014807] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 494.015957] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4facd5-ebbc-4a1f-bf18-65dcd52e6c97 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.025553] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 0 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 494.025778] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 494.026111] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 494.026111] env[62730]: value = "domain-c8" [ 494.026111] env[62730]: _type = "ClusterComputeResource" [ 494.026111] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 494.026974] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f92717-e3b2-450d-91eb-f0518fd7a1a0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 494.035096] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 0 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 496.286029] env[62730]: DEBUG dbcounter [-] [62730] Writing DB stats nova_cell0:SELECT=1 {{(pid=62730) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 496.286029] env[62730]: DEBUG dbcounter [-] [62730] Writing DB stats nova_cell1:SELECT=1 {{(pid=62730) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 531.758550] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "4b189162-95ca-4480-82a1-2025371f235a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.760029] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "4b189162-95ca-4480-82a1-2025371f235a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.804899] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 531.954162] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.955661] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.956028] env[62730]: INFO nova.compute.claims [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.166441] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.166923] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.170134] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5ab6b8-e585-4ffc-842e-b8718a501ee5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.185639] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc00bd4a-280e-4faf-b0b6-9552f54c767d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.228357] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 532.231343] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c4832d-e214-44ee-b29e-81325bfe3139 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.242029] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb7bdb9-65ce-4d60-b9ed-47fcae390eb7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.262736] env[62730]: DEBUG nova.compute.provider_tree [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.279328] env[62730]: DEBUG nova.scheduler.client.report [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 532.355550] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.356199] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 532.382039] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.382320] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.383903] env[62730]: INFO nova.compute.claims [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.446021] env[62730]: DEBUG nova.compute.utils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.449383] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Not allocating networking since 'none' was specified. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 532.484939] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 532.577357] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0640d6a0-902c-4717-88b2-28b7dd17b63a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.589500] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c580d73-ea24-4407-a17a-2c4cd557e42c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.630031] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e68963-e163-42cd-bd4c-5ae19b4b51b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.639115] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5197b20a-9118-4d61-826f-a614fd72959e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.648021] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 532.662236] env[62730]: DEBUG nova.compute.provider_tree [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.677392] env[62730]: DEBUG nova.scheduler.client.report [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 532.699820] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.701178] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 532.755925] env[62730]: DEBUG nova.compute.utils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.759675] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 532.759675] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 532.776991] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 532.898240] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 533.083081] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.083081] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.083247] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.083566] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.083566] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.084080] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.084080] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.084080] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
533.084432] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.084602] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.084776] env[62730]: DEBUG nova.virt.hardware [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.085923] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66626d3d-47bc-49ad-b5b0-38ffd72ba99c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.096913] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde70106-f9a9-430b-bed9-f28422f89038 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.119675] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.120033] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.120206] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.120424] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Flavor pref 0:0:0 {{(pid=62730) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.120601] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.120924] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.121069] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.121309] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 533.121475] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.121687] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.121944] env[62730]: DEBUG nova.virt.hardware [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.124930] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6089052a-2525-4e2e-be58-6e5e3fbd8ba8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.136766] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb33bb35-f989-4911-b39b-19fff1314607 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.157352] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Instance VIF info [] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 533.168975] env[62730]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 533.173530] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-075be2a7-8fe1-43da-8dd0-30708d79de23 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.176514] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2e1215-2276-4fce-a88f-ba82f9ce06c6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.195666] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Created folder: OpenStack in parent group-v4. [ 533.195917] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating folder: Project (dd64bc14ec224b22acbe0598227d9504). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 533.196268] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-464cddd7-43c3-482a-92b2-66987c9df8a6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.209704] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Created folder: Project (dd64bc14ec224b22acbe0598227d9504) in parent group-v942928. [ 533.209704] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating folder: Instances. Parent ref: group-v942929. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 533.209704] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-549af4b6-6d2e-4890-adf9-b329d0a86db1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.220986] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Created folder: Instances in parent group-v942929. [ 533.220986] env[62730]: DEBUG oslo.service.loopingcall [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 533.220986] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 533.220986] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fb76bb3-f928-419b-bd40-14d8cee712ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.245338] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 533.245338] env[62730]: value = "task-4837053" [ 533.245338] env[62730]: _type = "Task" [ 533.245338] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.255424] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837053, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.329376] env[62730]: DEBUG nova.policy [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb1e0a6d25024c45a2a57eb10f61b2ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf586645a56d4614a40b01d686d0c8bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 533.760109] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837053, 'name': CreateVM_Task, 'duration_secs': 0.428199} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.760405] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 533.761491] env[62730]: DEBUG oslo_vmware.service [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc40dcfd-8f29-42e6-89da-269086ba10f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.774784] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.774970] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.775698] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 533.775985] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a452c364-f02f-42af-9772-2846adedd42a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.781432] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for the task: (returnval){ [ 533.781432] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]529921f6-25bc-1c63-fead-d1e09560dfde" [ 533.781432] env[62730]: _type = "Task" [ 533.781432] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.790369] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]529921f6-25bc-1c63-fead-d1e09560dfde, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.896917] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.899943] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.918777] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 534.025113] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.026439] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.027698] env[62730]: INFO nova.compute.claims [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 534.161833] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60186f4-6c83-47a3-8314-47c858dd4601 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.175832] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a165490a-f0f7-43ae-b9c4-496085cd413c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.216800] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310378be-28e4-403d-b800-5cd3fcc77e89 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.227463] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9405af63-b8e2-4ec1-9b86-65c77f88b19a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.249539] env[62730]: DEBUG nova.compute.provider_tree [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.266173] env[62730]: DEBUG nova.scheduler.client.report [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 534.299276] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.299559] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 534.299824] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.299995] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.300582] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 534.301495] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 
tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.301999] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 534.310713] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd4d94fd-2a92-436b-a273-0949f79221c5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.332970] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 534.333767] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 534.335096] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234de726-8f68-4f34-94bc-7a7770b0d61b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.344905] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-addd67b2-1680-4620-853b-c808a331d28a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.355766] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for the task: (returnval){ [ 534.355766] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b5155c-d275-0a5e-69b2-7e55034c8d1d" [ 534.355766] env[62730]: _type = "Task" [ 534.355766] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.368699] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b5155c-d275-0a5e-69b2-7e55034c8d1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.376775] env[62730]: DEBUG nova.compute.utils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.378192] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 534.378410] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.398325] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 534.494530] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 534.525940] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 534.526663] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 534.526663] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 534.526663] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 534.526663] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 534.526975] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 534.527088] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 534.527362] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
534.527548] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 534.528031] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 534.528031] env[62730]: DEBUG nova.virt.hardware [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 534.529435] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a339dd3c-7a0e-4919-9689-f45accb4c02a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.540344] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c4b051-7f52-45cf-bdfd-71558930b954 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.657710] env[62730]: DEBUG nova.policy [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ec4bf5f7e104e0d8eae7bdd98861641', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ae994dbceb044ef8c023cb31350f1ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 534.737240] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.737464] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.778485] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 
tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 534.804024] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Successfully created port: d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 534.869299] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 534.869551] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating directory with path [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 534.869810] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24648ec1-c23d-49cd-bae5-05c24831fdc9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.876875] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.877129] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.878605] env[62730]: INFO nova.compute.claims [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 534.892257] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Created directory with path [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 534.892421] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] 
[instance: 4b189162-95ca-4480-82a1-2025371f235a] Fetch image to [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 534.892587] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 534.893479] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de3076b-3e29-4f23-a0df-9539bcc77cbd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.918339] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4bf74f-c5f6-4714-855f-1e99579d5bde {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.932938] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7944e9-17cf-4ca0-a02c-67d7301c475c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.975204] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc04cce-1d94-48d7-b8fb-2288d610f8c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.985956] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f3845f1-c0c0-41d8-8fb1-bc8b5c5b0cc6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.027803] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 535.111889] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cdb720-78ac-4985-909e-5022161cee94 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.120619] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6808110-25fe-4d95-bc8f-dcbfbd3405d7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.166115] env[62730]: DEBUG oslo_vmware.rw_handles [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 535.168400] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8997060-0e3b-447e-bbff-871178f8dacc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.229918] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2277fa39-defc-4d73-8875-35fd99db29e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.236870] env[62730]: DEBUG oslo_vmware.rw_handles [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 535.237063] env[62730]: DEBUG oslo_vmware.rw_handles [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 535.249405] env[62730]: DEBUG nova.compute.provider_tree [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.265139] env[62730]: DEBUG nova.scheduler.client.report [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.285676] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.408s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.287150] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 
tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 535.357870] env[62730]: DEBUG nova.compute.utils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.359437] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 535.359552] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 535.384857] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 535.485755] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 535.524853] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.525113] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.525665] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.525665] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.525665] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.526262] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.526617] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.526946] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 
tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 535.527067] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.527241] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.527540] env[62730]: DEBUG nova.virt.hardware [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.528792] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92706cdd-280e-46d8-aad0-b56d4e9ce8ef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.540908] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32a89bf-0a31-4603-8ac3-d6db15283adc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.630573] env[62730]: DEBUG nova.policy [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '874c4f54ae024940a2384274c27fb7b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42256023b89344de90ced8c51fd48cf6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 536.401509] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Successfully created port: 90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.190432] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Successfully created port: c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.447324] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 
tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.447652] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.470884] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 537.574387] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.574648] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.576165] env[62730]: INFO nova.compute.claims [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.625302] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Successfully updated port: d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 537.652272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.652272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquired lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.652272] env[62730]: DEBUG nova.network.neutron 
[None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.814533] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c26e646-7c3c-4e3c-96db-8497bfc768e2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.833184] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51c3c83-96c9-436c-8634-ad72f823775a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.869893] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a0ecdb-77b8-475a-8714-fa5516d47826 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.872496] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.878243] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44441744-4005-42ca-b8d0-6345862bddef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.894344] env[62730]: DEBUG nova.compute.provider_tree [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.907018] env[62730]: DEBUG nova.scheduler.client.report [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.941279] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.366s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.941800] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] 
[instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 538.011885] env[62730]: DEBUG nova.compute.utils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.015387] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 538.015387] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 538.034951] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 538.129844] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 538.171774] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.172073] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.172239] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.172426] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.172581] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.172737] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.173927] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.173927] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 538.173927] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 
tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.173927] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.173927] env[62730]: DEBUG nova.virt.hardware [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.174754] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bb0e0c-bc5d-4d6b-9667-34c7433eae30 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.183339] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b683dd-ddfa-4b28-9c2c-12b09640f5f4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.228019] env[62730]: DEBUG nova.policy [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba2c3e26fee84fa29aa8f6930505f1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b825311d36404f199e86101b21b30ad5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 538.599553] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Updating instance_info_cache with network_info: [{"id": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "address": "fa:16:3e:1a:b8:4c", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd06f0929-6e", "ovs_interfaceid": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.615691] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Releasing lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.616009] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Instance network_info: |[{"id": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "address": "fa:16:3e:1a:b8:4c", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd06f0929-6e", "ovs_interfaceid": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 538.616541] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:b8:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd06f0929-6eb5-42a8-92f0-83dbbff25ec7', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 538.625224] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Creating folder: Project (cf586645a56d4614a40b01d686d0c8bc). Parent ref: group-v942928. 
{{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 538.625898] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b61ccfb-24be-4060-8eeb-6ac805a456f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.640823] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Created folder: Project (cf586645a56d4614a40b01d686d0c8bc) in parent group-v942928. [ 538.641119] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Creating folder: Instances. Parent ref: group-v942932. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 538.641367] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5b8d20d-6c3b-4cc0-b34d-e800f8be73ab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.653029] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Created folder: Instances in parent group-v942932. [ 538.653497] env[62730]: DEBUG oslo.service.loopingcall [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 538.653738] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 538.653949] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-713cfbb7-94f7-46d0-83c8-195c21978afc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.679360] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 538.679360] env[62730]: value = "task-4837056" [ 538.679360] env[62730]: _type = "Task" [ 538.679360] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.696140] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837056, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.192565] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837056, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.573310] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Successfully created port: f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.691522] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837056, 'name': CreateVM_Task, 'duration_secs': 0.613501} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.691823] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 539.738050] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.738253] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.738593] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 539.738877] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf544cdf-ff82-41a1-a5cf-83454cfbe4a6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.750805] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Waiting for the task: (returnval){ [ 539.750805] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5271ee28-b391-3125-5c63-b503c26124da" [ 539.750805] env[62730]: _type = "Task" [ 539.750805] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.762136] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5271ee28-b391-3125-5c63-b503c26124da, 'name': SearchDatastore_Task} progress is 0%. 
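The lock and external-semaphore records above serialize access to the per-image entry in the datastore image cache, so concurrent spawns from image a46adab9-... don't race while one of them materializes the base VMDK. A minimal sketch of the same pattern with oslo.concurrency (the lock name is copied from the log; fetch_image and image_is_cached are hypothetical callables):

    from oslo_concurrency import lockutils

    IMAGE_LOCK = ("[datastore2] devstack-image-cache_base/"
                  "a46adab9-3ef5-4b2e-8d44-bab77576ed71")

    def ensure_image_cached(fetch_image, image_is_cached):
        # Only one worker per lock name runs the fetch; the rest block on
        # the lock and then find the image already cached.
        with lockutils.lock(IMAGE_LOCK):
            if not image_is_cached():
                fetch_image()
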
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.139627] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "318f7880-c500-40b8-9ca1-d8a857b36a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.139871] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.155440] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 540.268425] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.269396] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.270706] env[62730]: INFO nova.compute.claims [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.274021] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.274021] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 540.274021] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.538746] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aba7dd0-c595-4a94-ae3a-adb3dddcd1f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.557390] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf4e116-d549-4b5a-9d99-b7148348d0a0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.593850] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b88637-7779-406e-83d8-c7e69edaf9c3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.606686] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e381d0-506b-40a9-a122-96fd5a8f70f8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.622697] env[62730]: DEBUG nova.compute.provider_tree [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.634948] env[62730]: DEBUG nova.scheduler.client.report [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 540.656717] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.657547] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Start building networks asynchronously for instance. 
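The inventory the resource tracker reports above is what Placement schedules against, and the allocation ratios make the schedulable capacity differ from the physical totals: capacity = (total - reserved) * allocation_ratio. Worked through with the numbers from the log record:

    # Schedulable capacity implied by the inventory record above:
    vcpu      = (48     - 0)   * 4.0   # = 192.0 schedulable VCPUs
    memory_mb = (196590 - 512) * 1.0   # = 196078.0 MB
    disk_gb   = (200    - 0)   * 1.0   # = 200.0 GB
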
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 540.728619] env[62730]: DEBUG nova.compute.utils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.730556] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 540.730722] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 540.745447] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 540.841730] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 540.890983] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.891907] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.891907] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.892157] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.892313] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.892855] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.892855] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.892855] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 540.893174] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.893703] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.893862] env[62730]: DEBUG nova.virt.hardware [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.895138] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed95df92-64c2-4c82-8bbf-93cb9fe15260 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.905165] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59811537-982e-4790-b6e9-61f39060363d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.938859] env[62730]: DEBUG nova.policy [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bdba3bd0da54f458172e179a1f368ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76b57f69c45049f4b76e1ea4c1f78513', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 540.941993] env[62730]: DEBUG nova.compute.manager [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Received event network-vif-plugged-d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 540.942142] env[62730]: DEBUG oslo_concurrency.lockutils [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] Acquiring lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.942327] env[62730]: DEBUG oslo_concurrency.lockutils [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] Lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.942756] env[62730]: DEBUG oslo_concurrency.lockutils [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] Lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.942756] env[62730]: DEBUG nova.compute.manager [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] No waiting events found dispatching network-vif-plugged-d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 540.942877] env[62730]: WARNING nova.compute.manager [req-92cc719f-eed4-4e5a-98c8-eb4f236fe6d6 req-9e48cfb6-fd01-4600-9a35-e0b367bd566d service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Received unexpected event network-vif-plugged-d06f0929-6eb5-42a8-92f0-83dbbff25ec7 for instance with vm_state building and task_state spawning. [ 541.153121] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Successfully updated port: 90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 541.192263] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.192407] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.192598] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.500655] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Successfully updated port: c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 541.508179] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Instance cache missing network info. 
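The network-vif-plugged records above are Neutron's notification arriving at nova-compute: under the per-instance "<uuid>-events" lock, Nova pops a matching waiter if the spawn path has registered one, and otherwise logs the "unexpected event" WARNING seen here (the instance is still building, so nothing is waiting yet). A toy version of that pop-or-warn dispatch, with all names hypothetical:

    import threading

    _events_lock = threading.Lock()  # stands in for the "<uuid>-events" lock
    _waiters = {}                    # (instance_uuid, event_name) -> Event

    def prepare_for_event(instance_uuid, event_name):
        # The spawn path registers a waiter, then blocks on ev.wait(...).
        ev = threading.Event()
        with _events_lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def external_instance_event(instance_uuid, event_name):
        # The Neutron-facing path pops the waiter under the events lock.
        with _events_lock:
            waiter = _waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("WARNING: unexpected event %s for %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()  # wake the spawn path blocked on this plug event
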
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.514981] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.515151] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquired lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.515402] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.711502] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.043982] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Successfully updated port: f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 542.061196] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.061196] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquired lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.061196] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 542.178402] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.218403] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Updating instance_info_cache with network_info: [{"id": "90cea150-d842-4950-a9e9-4df14e8d24d6", "address": "fa:16:3e:52:eb:b9", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cea150-d8", "ovs_interfaceid": "90cea150-d842-4950-a9e9-4df14e8d24d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.243531] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.244606] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Instance network_info: |[{"id": "90cea150-d842-4950-a9e9-4df14e8d24d6", "address": "fa:16:3e:52:eb:b9", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cea150-d8", "ovs_interfaceid": "90cea150-d842-4950-a9e9-4df14e8d24d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 542.244711] env[62730]: DEBUG 
nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:eb:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90cea150-d842-4950-a9e9-4df14e8d24d6', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 542.255037] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating folder: Project (7ae994dbceb044ef8c023cb31350f1ad). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.255959] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa9c1466-866a-45c2-8a4d-54897ad008e3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.272814] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created folder: Project (7ae994dbceb044ef8c023cb31350f1ad) in parent group-v942928. [ 542.273190] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating folder: Instances. Parent ref: group-v942935. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.273922] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18432c8a-aa1f-4c0a-a6b0-1d2ba18ebf53 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.289293] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created folder: Instances in parent group-v942935. [ 542.290362] env[62730]: DEBUG oslo.service.loopingcall [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.291027] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 542.291467] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f2b97e2-86fe-447f-91bf-2e4f38d29e50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.316451] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 542.316451] env[62730]: value = "task-4837059" [ 542.316451] env[62730]: _type = "Task" [ 542.316451] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.330398] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837059, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.338460] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Updating instance_info_cache with network_info: [{"id": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "address": "fa:16:3e:2e:93:e7", "network": {"id": "57d738f7-c6e2-41a9-8e48-eb4a47b6b69c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-350650333-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42256023b89344de90ced8c51fd48cf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52225fa-e5", "ovs_interfaceid": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.358139] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Releasing lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.358398] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance network_info: |[{"id": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "address": "fa:16:3e:2e:93:e7", "network": {"id": "57d738f7-c6e2-41a9-8e48-eb4a47b6b69c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-350650333-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42256023b89344de90ced8c51fd48cf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 
381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52225fa-e5", "ovs_interfaceid": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 542.358853] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:93:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '42f08482-a1da-405d-9918-d733d9f5173c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c52225fa-e5ef-4fe5-970f-ba05d59bebfb', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 542.366607] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Creating folder: Project (42256023b89344de90ced8c51fd48cf6). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.367430] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aede7a24-034e-4e26-b8f2-7b77fe8cfeb4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.382130] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Created folder: Project (42256023b89344de90ced8c51fd48cf6) in parent group-v942928. [ 542.382130] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Creating folder: Instances. Parent ref: group-v942938. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 542.382130] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d652559-77b9-440e-9424-19d9d9fe8394 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.398830] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Created folder: Instances in parent group-v942938. [ 542.398830] env[62730]: DEBUG oslo.service.loopingcall [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.400655] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 542.401829] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Successfully created port: 05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.403874] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd5d4063-643d-4907-963a-15d273f037aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.432105] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 542.432105] env[62730]: value = "task-4837062" [ 542.432105] env[62730]: _type = "Task" [ 542.432105] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.444077] env[62730]: DEBUG nova.compute.manager [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Received event network-vif-plugged-90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 542.444077] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] Acquiring lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.444077] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] Lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.444077] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] Lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.444349] env[62730]: DEBUG nova.compute.manager [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] No waiting events found dispatching network-vif-plugged-90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 542.444349] env[62730]: WARNING nova.compute.manager [req-ba862022-7d7f-43fa-9188-baf4721f21bd req-4677761d-d0c7-4c3e-b41f-2c24c4f3be53 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Received unexpected event network-vif-plugged-90cea150-d842-4950-a9e9-4df14e8d24d6 for instance with 
vm_state building and task_state spawning. [ 542.450528] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837062, 'name': CreateVM_Task} progress is 6%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.749449] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.749449] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.749449] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 542.749449] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 542.770430] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.770430] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.771052] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.771327] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.771454] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.771578] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 542.771728] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 542.772271] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.772530] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.772722] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.772909] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.773336] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.774586] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.774586] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
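The burst of "Running periodic task ComputeManager._*" records comes from oslo.service's periodic-task machinery: each task is a decorated method on the manager class, and the service loop dispatches whichever tasks' spacing has elapsed. A minimal sketch against the real oslo_service API (the task body here is a placeholder):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Refresh network info for one instance per run, skipping
            # instances that are still Building, as the records above do.
            pass

    # The service loop calls run_periodic_tasks(context) repeatedly; it
    # emits the "Running periodic task ..." DEBUG lines and invokes any
    # decorated task whose interval has elapsed.
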
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 542.774586] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.793774] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.795095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.795387] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.795574] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 542.797061] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc0173f-ae4b-4e05-891a-2e0720da4c7f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.807752] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834b0432-63a4-48fe-9ea7-bea920e366ca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.831167] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9ba26d-5d00-47eb-bb03-d305df6a21e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.845905] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837059, 'name': CreateVM_Task, 'duration_secs': 0.406266} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.846318] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 542.847187] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.847676] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.847871] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 542.848829] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854ab55c-c8e2-4c17-bcd2-e5e4bc400c6c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.852266] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a4d1cc-a37d-4fa7-9d43-a2e19e6b996f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.859152] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 542.859152] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52fd3e6a-d7b3-55fe-202d-7e1e2c6ce94b" [ 542.859152] env[62730]: _type = "Task" [ 542.859152] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.887671] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180551MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 542.887841] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.888042] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.902434] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.902434] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 542.902434] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.951455] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837062, 'name': CreateVM_Task, 'duration_secs': 0.417584} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.951455] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 542.951925] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.952095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.953195] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 542.953195] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9ff86cd-55f5-4ae1-886a-428971049a98 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.960787] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for the task: (returnval){ [ 542.960787] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5235c99e-cf4c-fb42-16bc-2d474b21e12f" [ 542.960787] env[62730]: _type = "Task" [ 542.960787] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.973477] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5235c99e-cf4c-fb42-16bc-2d474b21e12f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.015089] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4b189162-95ca-4480-82a1-2025371f235a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.015230] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.015681] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.015681] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 736075f4-302b-4b1a-9358-7fe2fb73a36f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.015681] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.015817] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 543.016690] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 543.016690] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '6', 'num_vm_building': '6', 'num_task_spawning': '6', 'num_os_type_None': '6', 'num_proj_dd64bc14ec224b22acbe0598227d9504': '1', 'io_workload': '6', 'num_proj_cf586645a56d4614a40b01d686d0c8bc': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_42256023b89344de90ced8c51fd48cf6': '1', 'num_proj_b825311d36404f199e86101b21b30ad5': '1', 'num_proj_76b57f69c45049f4b76e1ea4c1f78513': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 543.130179] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Updating instance_info_cache with network_info: [{"id": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "address": "fa:16:3e:71:cb:cf", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ca2f7d-d3", "ovs_interfaceid": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.156187] env[62730]: DEBUG nova.compute.manager [req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Received event network-vif-plugged-c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 543.156187] env[62730]: DEBUG oslo_concurrency.lockutils [req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] Acquiring lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.156274] env[62730]: DEBUG oslo_concurrency.lockutils 
[req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.156458] env[62730]: DEBUG oslo_concurrency.lockutils [req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.156642] env[62730]: DEBUG nova.compute.manager [req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] No waiting events found dispatching network-vif-plugged-c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 543.156963] env[62730]: WARNING nova.compute.manager [req-528cfc10-e0f1-4ef5-8645-6420c9aa0406 req-23a1d102-aa5d-4cc3-a055-031bb6998f65 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Received unexpected event network-vif-plugged-c52225fa-e5ef-4fe5-970f-ba05d59bebfb for instance with vm_state building and task_state spawning. [ 543.158243] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Releasing lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.158338] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance network_info: |[{"id": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "address": "fa:16:3e:71:cb:cf", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ca2f7d-d3", "ovs_interfaceid": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 543.163342] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 
0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:cb:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0ca2f7d-d371-433b-ac47-62e8f0b5b139', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 543.171757] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Creating folder: Project (b825311d36404f199e86101b21b30ad5). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 543.176652] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb2feab4-1d2e-4140-91ba-9828f0fa376c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.191561] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Created folder: Project (b825311d36404f199e86101b21b30ad5) in parent group-v942928. [ 543.191561] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Creating folder: Instances. Parent ref: group-v942941. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 543.191561] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e72db77-e9fc-47d7-af8d-d6da796d544c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.199598] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b627537-5d3b-43bb-a63f-0630b40bf511 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.207116] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Created folder: Instances in parent group-v942941. [ 543.207116] env[62730]: DEBUG oslo.service.loopingcall [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.207116] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 543.207726] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db8052d7-38b7-431b-8f04-83fd148b8b8b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.226689] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ec84e1-3fd0-467b-afe1-60389a079354 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.268472] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5e74e2-1925-4dcb-9fdc-3091b51c510c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.272414] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 543.272414] env[62730]: value = "task-4837065" [ 543.272414] env[62730]: _type = "Task" [ 543.272414] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.284307] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15e5188-97cf-4f28-81cf-e58fa3bf08ed {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.292266] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837065, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.304102] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.348034] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.376411] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 543.376920] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.489s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.475537] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.477012] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 543.477496] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.784761] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837065, 'name': CreateVM_Task, 'duration_secs': 0.523527} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.784997] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 543.786547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.786547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.786547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 543.786547] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f1f8c4-518c-4173-b6e3-2288febf362c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.792634] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Waiting for the task: (returnval){ [ 543.792634] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52f534af-0cff-2450-e811-ab3b0323200d" [ 543.792634] env[62730]: _type = "Task" [ 543.792634] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.804637] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52f534af-0cff-2450-e811-ab3b0323200d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.965870] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "16f7dfdb-2063-4992-9f40-4b332006940f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.965870] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "16f7dfdb-2063-4992-9f40-4b332006940f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.030451] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 544.153643] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.153960] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.156307] env[62730]: INFO nova.compute.claims [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.309048] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.309911] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 544.309911] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.466405] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32857c37-e9ba-4d3e-9bec-7585acac85a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.474931] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c490b56-8fef-4539-9f77-47151730ef83 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.521753] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2122015-5802-4d84-b94f-8511a612aa9e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.534748] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7080ab31-92e5-43fd-8fcc-879bfc731276 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.550911] env[62730]: DEBUG nova.compute.provider_tree [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.562015] env[62730]: DEBUG nova.scheduler.client.report [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.577779] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.578295] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 544.622680] env[62730]: DEBUG nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Received event network-changed-d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 544.622680] env[62730]: DEBUG nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Refreshing instance network info cache due to event network-changed-d06f0929-6eb5-42a8-92f0-83dbbff25ec7. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 544.622945] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Acquiring lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.623299] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Acquired lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.623749] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Refreshing network info cache for port d06f0929-6eb5-42a8-92f0-83dbbff25ec7 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 544.637078] env[62730]: DEBUG nova.compute.utils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.642457] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 544.642457] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.649246] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 544.751274] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 544.780492] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.780796] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.780990] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.781892] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.781985] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.782770] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.783023] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.783194] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.783358] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 
tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.783562] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.783746] env[62730]: DEBUG nova.virt.hardware [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.784683] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08d22e0-bb1f-4fd7-b8a1-a689b669240d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.796272] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77761fa-36b9-4935-b5eb-d62aa42d3355 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.857759] env[62730]: DEBUG nova.policy [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a29c298774104160b3753f5fcca7d1c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf705f506bcc4409881416d80a745afc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.996035] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Updated VIF entry in instance network info cache for port d06f0929-6eb5-42a8-92f0-83dbbff25ec7. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 545.996633] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Updating instance_info_cache with network_info: [{"id": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "address": "fa:16:3e:1a:b8:4c", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd06f0929-6e", "ovs_interfaceid": "d06f0929-6eb5-42a8-92f0-83dbbff25ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.012950] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Releasing lock "refresh_cache-1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.013223] env[62730]: DEBUG nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Received event network-vif-plugged-f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 546.015515] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Acquiring lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.015831] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.016040] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.016584] env[62730]: DEBUG nova.compute.manager 
[req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] No waiting events found dispatching network-vif-plugged-f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 546.016584] env[62730]: WARNING nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Received unexpected event network-vif-plugged-f0ca2f7d-d371-433b-ac47-62e8f0b5b139 for instance with vm_state building and task_state spawning. [ 546.016584] env[62730]: DEBUG nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Received event network-changed-f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 546.016758] env[62730]: DEBUG nova.compute.manager [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Refreshing instance network info cache due to event network-changed-f0ca2f7d-d371-433b-ac47-62e8f0b5b139. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 546.016915] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Acquiring lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.017069] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Acquired lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.017238] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Refreshing network info cache for port f0ca2f7d-d371-433b-ac47-62e8f0b5b139 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 546.542416] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Successfully created port: 96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.750480] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Successfully updated port: 05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 546.767513] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.767513] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquired lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.767513] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.935329] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.935573] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.950721] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.954207] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.041641] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.042299] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.046690] env[62730]: INFO nova.compute.claims [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.284992] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f317b793-e1a3-4a03-9fe6-8844205b124b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.295805] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604c92b8-8655-4f48-85ee-8d2f4c08623a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.342279] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff09deb-fd98-42b4-b0d3-4bb4f2399e7e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.350907] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e3e8fa-e043-4375-b408-477d34169df1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.368796] env[62730]: DEBUG nova.compute.provider_tree [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.386057] env[62730]: DEBUG nova.scheduler.client.report [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.404320] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a 
tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.362s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.404979] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 547.468467] env[62730]: DEBUG nova.compute.utils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.469866] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 547.470610] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 547.483080] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 547.511505] env[62730]: DEBUG nova.compute.manager [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Received event network-changed-90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 547.511505] env[62730]: DEBUG nova.compute.manager [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Refreshing instance network info cache due to event network-changed-90cea150-d842-4950-a9e9-4df14e8d24d6. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 547.511505] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Acquiring lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.512208] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Acquired lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.512615] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Refreshing network info cache for port 90cea150-d842-4950-a9e9-4df14e8d24d6 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 547.534704] env[62730]: DEBUG nova.compute.manager [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Received event network-vif-plugged-05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 547.534924] env[62730]: DEBUG oslo_concurrency.lockutils [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] Acquiring lock "318f7880-c500-40b8-9ca1-d8a857b36a88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.535242] env[62730]: DEBUG oslo_concurrency.lockutils [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.535412] env[62730]: DEBUG oslo_concurrency.lockutils [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.535627] env[62730]: DEBUG nova.compute.manager [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] No waiting events found dispatching network-vif-plugged-05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 547.535844] env[62730]: WARNING nova.compute.manager [req-af294bed-3994-456a-8800-b9215c4a278e req-3ea3f64c-98d9-4ffc-bfda-b8b890dc9ea0 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Received unexpected event network-vif-plugged-05bebb3c-894d-4f8e-891a-5c7f5a3bde57 for instance with vm_state building and task_state spawning. 
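The recurring "Acquiring lock ... / Lock ... acquired ... waited / released ... held" triplets above are emitted by oslo.concurrency's lockutils wrapper rather than by Nova itself; the waited/held timings are measured inside that wrapper. A minimal sketch of the pattern, assuming only that oslo.concurrency is installed (the function name is illustrative, not Nova's actual code):

    # Sketch of the lockutils pattern behind the "compute_resources"
    # DEBUG lines in this log. Illustrative names, not Nova source.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the named internal semaphore held; lockutils logs
        # the acquire/wait/release timings at DEBUG on entry and exit.
        pass

    update_available_resource()

Likewise, the CreateVM_Task and SearchDatastore_Task records above ("Waiting for the task ... progress is 15% ... completed successfully") follow oslo.vmware's invoke-then-poll idiom. A hedged sketch, with the session and managed-object reference assumed to be supplied by the caller, and PowerOnVM_Task standing in for any vCenter task method:

    # Sketch of oslo.vmware task polling; wait_for_task() emits the
    # "Task: {...} progress is N%" lines until vCenter reports success.
    from oslo_vmware import api

    def run_vcenter_task(session: api.VMwareAPISession, vm_ref):
        # invoke_api() issues the SOAP call and returns a task reference;
        # wait_for_task() then polls it at the session's task_poll_interval.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)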
[ 547.573689] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 547.605360] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.605648] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.605823] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.606017] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.606296] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.606456] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.606885] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.606885] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a 
tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.607014] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.607168] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.607818] env[62730]: DEBUG nova.virt.hardware [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.609864] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad675af6-ea1d-4435-b54b-ca99a59185a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.620353] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ba0a85-8251-4923-97fa-437a0187bd2d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.862845] env[62730]: DEBUG nova.policy [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a29c298774104160b3753f5fcca7d1c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf705f506bcc4409881416d80a745afc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.938267] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Updated VIF entry in instance network info cache for port f0ca2f7d-d371-433b-ac47-62e8f0b5b139. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 547.938636] env[62730]: DEBUG nova.network.neutron [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Updating instance_info_cache with network_info: [{"id": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "address": "fa:16:3e:71:cb:cf", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ca2f7d-d3", "ovs_interfaceid": "f0ca2f7d-d371-433b-ac47-62e8f0b5b139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.949194] env[62730]: DEBUG oslo_concurrency.lockutils [req-b6768f20-e7b9-4882-b30a-5f532f1534a1 req-1b1c6b90-3f43-44f8-a6c7-402fc083e995 service nova] Releasing lock "refresh_cache-0a718440-a0f8-4614-a9f3-553b2ff2e156" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.098892] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Updating instance_info_cache with network_info: [{"id": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "address": "fa:16:3e:30:f9:80", "network": {"id": "ac500f32-8661-4cb1-a4fc-d2785ffc23a9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-921636943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76b57f69c45049f4b76e1ea4c1f78513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bebb3c-89", "ovs_interfaceid": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.120445] env[62730]: DEBUG oslo_concurrency.lockutils 
[None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Releasing lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.120997] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance network_info: |[{"id": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "address": "fa:16:3e:30:f9:80", "network": {"id": "ac500f32-8661-4cb1-a4fc-d2785ffc23a9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-921636943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76b57f69c45049f4b76e1ea4c1f78513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bebb3c-89", "ovs_interfaceid": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 548.121927] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:f9:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05bebb3c-894d-4f8e-891a-5c7f5a3bde57', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 548.133683] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Creating folder: Project (76b57f69c45049f4b76e1ea4c1f78513). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 548.134387] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17a9c921-752b-403e-9e53-1e5cd814b5e7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.147495] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Created folder: Project (76b57f69c45049f4b76e1ea4c1f78513) in parent group-v942928. 
[ 548.147869] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Creating folder: Instances. Parent ref: group-v942944. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 548.148682] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b13819a-474b-4279-9da2-31101793748d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.159721] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Created folder: Instances in parent group-v942944. [ 548.159994] env[62730]: DEBUG oslo.service.loopingcall [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.160194] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 548.160406] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24893ecc-91eb-4591-8299-1433db49b3fa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.183520] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 548.183520] env[62730]: value = "task-4837068"
[ 548.183520] env[62730]: _type = "Task"
[ 548.183520] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.195304] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837068, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.700031] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837068, 'name': CreateVM_Task, 'duration_secs': 0.370062} completed successfully.
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.700238] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 548.700935] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.701122] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.701458] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 548.701785] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35b2fd42-b4ea-4bd3-a36c-38ebb6e20f3c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.708396] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for the task: (returnval){
[ 548.708396] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526ac3dd-1dda-30ec-51d9-ac4e46c07c40"
[ 548.708396] env[62730]: _type = "Task"
[ 548.708396] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.720838] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526ac3dd-1dda-30ec-51d9-ac4e46c07c40, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.225762] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.226480] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 549.226480] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.235671] env[62730]: DEBUG nova.compute.manager [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Received event network-changed-05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 550.236010] env[62730]: DEBUG nova.compute.manager [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Refreshing instance network info cache due to event network-changed-05bebb3c-894d-4f8e-891a-5c7f5a3bde57. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 550.236168] env[62730]: DEBUG oslo_concurrency.lockutils [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] Acquiring lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.236314] env[62730]: DEBUG oslo_concurrency.lockutils [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] Acquired lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.236481] env[62730]: DEBUG nova.network.neutron [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Refreshing network info cache for port 05bebb3c-894d-4f8e-891a-5c7f5a3bde57 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 550.414314] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Updated VIF entry in instance network info cache for port 90cea150-d842-4950-a9e9-4df14e8d24d6. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 550.414314] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Updating instance_info_cache with network_info: [{"id": "90cea150-d842-4950-a9e9-4df14e8d24d6", "address": "fa:16:3e:52:eb:b9", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90cea150-d8", "ovs_interfaceid": "90cea150-d842-4950-a9e9-4df14e8d24d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.426288] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Releasing lock "refresh_cache-ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.426544] env[62730]: DEBUG nova.compute.manager [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Received event network-changed-c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 550.426715] env[62730]: DEBUG nova.compute.manager [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Refreshing instance network info cache due to event network-changed-c52225fa-e5ef-4fe5-970f-ba05d59bebfb. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 550.426916] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Acquiring lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.427068] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Acquired lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.427226] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Refreshing network info cache for port c52225fa-e5ef-4fe5-970f-ba05d59bebfb {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 551.321816] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Successfully created port: df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.841756] env[62730]: DEBUG nova.network.neutron [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Updated VIF entry in instance network info cache for port 05bebb3c-894d-4f8e-891a-5c7f5a3bde57. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 551.841756] env[62730]: DEBUG nova.network.neutron [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Updating instance_info_cache with network_info: [{"id": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "address": "fa:16:3e:30:f9:80", "network": {"id": "ac500f32-8661-4cb1-a4fc-d2785ffc23a9", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-921636943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76b57f69c45049f4b76e1ea4c1f78513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bebb3c-89", "ovs_interfaceid": "05bebb3c-894d-4f8e-891a-5c7f5a3bde57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.855576] env[62730]: DEBUG oslo_concurrency.lockutils [req-0fe11b30-87e2-438f-bc6c-630c466312ed req-2a1603ac-bf22-4404-a2d7-7cd5e0028172 service nova] Releasing lock "refresh_cache-318f7880-c500-40b8-9ca1-d8a857b36a88" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.597687] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Updated VIF entry in instance network info cache for port c52225fa-e5ef-4fe5-970f-ba05d59bebfb. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 552.597687] env[62730]: DEBUG nova.network.neutron [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Updating instance_info_cache with network_info: [{"id": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "address": "fa:16:3e:2e:93:e7", "network": {"id": "57d738f7-c6e2-41a9-8e48-eb4a47b6b69c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-350650333-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42256023b89344de90ced8c51fd48cf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52225fa-e5", "ovs_interfaceid": "c52225fa-e5ef-4fe5-970f-ba05d59bebfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.612500] env[62730]: DEBUG oslo_concurrency.lockutils [req-88e487ba-4980-4557-b482-b3018009ee7e req-483fd08f-d3a4-49bf-9cc2-7a9a61b437c8 service nova] Releasing lock "refresh_cache-736075f4-302b-4b1a-9358-7fe2fb73a36f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.651109] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Successfully updated port: 96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.663374] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.663374] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.663374] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.884252] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 
tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.918326] env[62730]: DEBUG nova.compute.manager [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Received event network-vif-plugged-96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 553.918326] env[62730]: DEBUG oslo_concurrency.lockutils [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] Acquiring lock "16f7dfdb-2063-4992-9f40-4b332006940f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.919948] env[62730]: DEBUG oslo_concurrency.lockutils [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] Lock "16f7dfdb-2063-4992-9f40-4b332006940f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.920260] env[62730]: DEBUG oslo_concurrency.lockutils [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] Lock "16f7dfdb-2063-4992-9f40-4b332006940f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.920535] env[62730]: DEBUG nova.compute.manager [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] No waiting events found dispatching network-vif-plugged-96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 553.920763] env[62730]: WARNING nova.compute.manager [req-ff21e8c3-ef68-4ae8-97d0-dd8c3f40d90f req-f829a70d-986a-4b35-9079-55e3664dea52 service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Received unexpected event network-vif-plugged-96c4afce-4e4a-4ab9-b455-c651af8e7a0e for instance with vm_state building and task_state spawning. 
[ 554.139829] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Updating instance_info_cache with network_info: [{"id": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "address": "fa:16:3e:8f:01:0d", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c4afce-4e", "ovs_interfaceid": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.159846] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.160174] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Instance network_info: |[{"id": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "address": "fa:16:3e:8f:01:0d", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c4afce-4e", "ovs_interfaceid": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.160594] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None 
req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:01:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96c4afce-4e4a-4ab9-b455-c651af8e7a0e', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 554.173960] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating folder: Project (cf705f506bcc4409881416d80a745afc). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.174703] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-707b173e-2af8-4228-a18d-77f563842422 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.189992] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created folder: Project (cf705f506bcc4409881416d80a745afc) in parent group-v942928. [ 554.190304] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating folder: Instances. Parent ref: group-v942947. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.190961] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49680a6c-a41b-4175-ac70-c6fe28a0f623 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.204308] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created folder: Instances in parent group-v942947. [ 554.205772] env[62730]: DEBUG oslo.service.loopingcall [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.206595] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 554.206878] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7abdd10b-30bf-4d5a-af8a-fcd455506d13 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.238979] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 554.238979] env[62730]: value = "task-4837071"
[ 554.238979] env[62730]: _type = "Task"
[ 554.238979] env[62730]: } to complete.
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.251422] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837071, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.751685] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837071, 'name': CreateVM_Task, 'duration_secs': 0.346485} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.752586] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.753324] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.753480] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.753805] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.754089] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be7d3911-def8-40da-9fe9-8a650c8993ef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.761037] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){
[ 554.761037] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d50dc9-d9cb-da52-51a6-273807749ac0"
[ 554.761037] env[62730]: _type = "Task"
[ 554.761037] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.771280] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d50dc9-d9cb-da52-51a6-273807749ac0, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.276450] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.276987] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.277267] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.585050] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.585308] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.600426] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 555.686984] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.687266] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.689310] env[62730]: INFO nova.compute.claims [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.916532] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Successfully updated port: df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.935301] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.936033] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.936033] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.967822] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b530ed1-e948-4c1e-8076-ba3e28c20ef4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.978755] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ff7c0-4534-4182-88ae-b8126e3d2f37 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.015391] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab5c635-7568-4378-9f29-330cb7a9fd19 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.024153] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f27ddc-f244-4262-9918-78e7f5f70808 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.046238] env[62730]: DEBUG nova.compute.provider_tree [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.058608] env[62730]: DEBUG nova.scheduler.client.report [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.084815] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.397s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.085303] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 556.166574] env[62730]: DEBUG nova.compute.utils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.169878] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Allocating IP information in the background. 
{{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 556.169878] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 556.178754] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.195853] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 556.358459] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 556.396047] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.396305] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.396466] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.396649] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 556.396793] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.396938] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.398412] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.398648] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.398838] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.399016] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.399249] env[62730]: DEBUG nova.virt.hardware [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.401093] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638191cc-aad7-415c-913b-8f7302ef53aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.416263] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54feea23-1a8f-44c8-b3de-d3c3abf35bd1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.883176] env[62730]: DEBUG nova.policy [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3971a56d7b9142928c6f5ecf5b43c454', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e26a6097b8c4bf3b6d4b77656087f8c', 'project_domain_id': 'default', 'roles': 
['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 557.485246] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Updating instance_info_cache with network_info: [{"id": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "address": "fa:16:3e:36:ca:b8", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf1b66fc-9e", "ovs_interfaceid": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.520346] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.520568] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance network_info: |[{"id": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "address": "fa:16:3e:36:ca:b8", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf1b66fc-9e", "ovs_interfaceid": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.521335] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:ca:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df1b66fc-9ef1-457f-8ecd-ba4cafc2c378', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.529812] env[62730]: DEBUG oslo.service.loopingcall [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.531395] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.531395] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3233914-ca8c-434f-acec-4c33a5480959 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.562411] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.562411] env[62730]: value = "task-4837072" [ 557.562411] env[62730]: _type = "Task" [ 557.562411] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.572465] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837072, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.074964] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837072, 'name': CreateVM_Task, 'duration_secs': 0.420975} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.075285] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 558.075905] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.076084] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.076429] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.076665] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4aef09b-d53e-4b90-8704-1ebfe065d7e3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.085651] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 558.085651] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d92867-11fc-d3cb-ea7a-09c54a162842" [ 558.085651] env[62730]: _type = "Task" [ 558.085651] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.099293] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d92867-11fc-d3cb-ea7a-09c54a162842, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.597262] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.597726] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.597873] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.923044] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.924079] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.937867] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 559.029164] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.029434] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.030971] env[62730]: INFO nova.compute.claims [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.288540] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef8c400-da1a-4319-bbc2-14cd42a3ca3f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.297452] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a020504-6569-4c6c-b506-e02d3283bf8e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.331912] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20818670-a7b5-41e9-8a9f-2b02d080a7aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.342108] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377d9ad9-a181-40ab-a8d5-0e4547982c7d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.358419] env[62730]: DEBUG nova.compute.provider_tree [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.373216] env[62730]: DEBUG nova.scheduler.client.report [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.400438] env[62730]: DEBUG oslo_concurrency.lockutils 
[None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.371s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.400917] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 559.473251] env[62730]: DEBUG nova.compute.utils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.474644] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 559.474820] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 559.495335] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 559.605186] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 559.647263] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.647548] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.647765] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.647991] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.648833] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.649435] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.649837] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.650541] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 559.653098] 
env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.653098] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.653220] env[62730]: DEBUG nova.virt.hardware [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.654427] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c92ef9-6943-4ccf-adba-e827a4f811fb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.663640] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90c830f-0c20-41eb-99dd-69768b3f0fd1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.754192] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Received event network-changed-96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.754406] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Refreshing instance network info cache due to event network-changed-96c4afce-4e4a-4ab9-b455-c651af8e7a0e. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 559.754631] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Acquiring lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.754777] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Acquired lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.754953] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Refreshing network info cache for port 96c4afce-4e4a-4ab9-b455-c651af8e7a0e {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 559.764590] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Successfully created port: 6df1fa79-5333-40c4-8385-faef7685c678 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.952706] env[62730]: DEBUG nova.policy [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3971a56d7b9142928c6f5ecf5b43c454', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e26a6097b8c4bf3b6d4b77656087f8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 560.112109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.112356] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.221771] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.221771] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.031482] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Updated VIF entry in instance network info cache for port 96c4afce-4e4a-4ab9-b455-c651af8e7a0e. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 562.031727] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Updating instance_info_cache with network_info: [{"id": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "address": "fa:16:3e:8f:01:0d", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c4afce-4e", "ovs_interfaceid": "96c4afce-4e4a-4ab9-b455-c651af8e7a0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.044420] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Releasing lock "refresh_cache-16f7dfdb-2063-4992-9f40-4b332006940f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.045813] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Received event network-vif-plugged-df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.045813] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Acquiring lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.045813] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.045813] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.046127] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] No waiting events found dispatching network-vif-plugged-df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.046127] env[62730]: WARNING nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Received unexpected event network-vif-plugged-df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 for instance with vm_state building and task_state spawning. [ 562.046577] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Received event network-changed-df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.046577] env[62730]: DEBUG nova.compute.manager [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Refreshing instance network info cache due to event network-changed-df1b66fc-9ef1-457f-8ecd-ba4cafc2c378. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.046577] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Acquiring lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.046722] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Acquired lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.046868] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Refreshing network info cache for port df1b66fc-9ef1-457f-8ecd-ba4cafc2c378 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.547711] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Successfully created port: 66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 563.955115] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Updated VIF entry in instance network info cache for port df1b66fc-9ef1-457f-8ecd-ba4cafc2c378. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.955115] env[62730]: DEBUG nova.network.neutron [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Updating instance_info_cache with network_info: [{"id": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "address": "fa:16:3e:36:ca:b8", "network": {"id": "c89e14b4-fff7-48e0-8a8c-29a838c3b6aa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1735078311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf705f506bcc4409881416d80a745afc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf1b66fc-9e", "ovs_interfaceid": "df1b66fc-9ef1-457f-8ecd-ba4cafc2c378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.965306] env[62730]: DEBUG oslo_concurrency.lockutils [req-9d2ebe29-4357-4ca6-afe4-7c6f998c9d9c req-300bb3cd-a6e7-4a00-83f9-b6dd46e02e1a service nova] Releasing lock "refresh_cache-d8ac549d-b27c-4d4a-a58b-de65bb5586f3" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.521703] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Successfully updated port: 6df1fa79-5333-40c4-8385-faef7685c678 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 564.540872] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.540872] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.540872] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 564.687843] env[62730]: DEBUG nova.network.neutron [None 
req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.728439] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Updating instance_info_cache with network_info: [{"id": "6df1fa79-5333-40c4-8385-faef7685c678", "address": "fa:16:3e:da:e8:27", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df1fa79-53", "ovs_interfaceid": "6df1fa79-5333-40c4-8385-faef7685c678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.750589] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.751014] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance network_info: |[{"id": "6df1fa79-5333-40c4-8385-faef7685c678", "address": "fa:16:3e:da:e8:27", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df1fa79-53", "ovs_interfaceid": "6df1fa79-5333-40c4-8385-faef7685c678", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 565.751357] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:e8:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6df1fa79-5333-40c4-8385-faef7685c678', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 565.761043] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating folder: Project (7e26a6097b8c4bf3b6d4b77656087f8c). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 565.762515] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1986458-6e82-435b-9cda-832092e02d62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.776649] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created folder: Project (7e26a6097b8c4bf3b6d4b77656087f8c) in parent group-v942928. [ 565.776869] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating folder: Instances. Parent ref: group-v942951. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 565.777306] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ef19450-ef87-4c0b-a19b-f90cdee4c787 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.789509] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created folder: Instances in parent group-v942951. [ 565.789725] env[62730]: DEBUG oslo.service.loopingcall [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.790764] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 565.790764] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34f03b32-504d-4057-b6c6-8d3e6bc32fcd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.820194] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 565.820194] env[62730]: value = "task-4837075" [ 565.820194] env[62730]: _type = "Task" [ 565.820194] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.830075] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837075, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.282979] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Successfully updated port: 66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.303355] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.303501] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.303677] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 566.335977] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837075, 'name': CreateVM_Task, 'duration_secs': 0.410024} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.336228] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 566.337035] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.337280] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.337531] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 566.339027] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e5e1367-a8ed-46ae-aa60-38df9754f278 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.348817] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 566.348817] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52948f42-b745-077b-da22-3d0231b5ab76" [ 566.348817] env[62730]: _type = "Task" [ 566.348817] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.363425] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52948f42-b745-077b-da22-3d0231b5ab76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.467223] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.648505] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "540af840-eba5-4cee-a37c-6d6809a24f95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.648748] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.861108] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.861108] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 566.861108] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.429585] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Updating instance_info_cache with network_info: [{"id": "66e43dfe-4e24-438a-8ae6-122bad049292", "address": "fa:16:3e:2e:c4:e8", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e43dfe-4e", "ovs_interfaceid": 
"66e43dfe-4e24-438a-8ae6-122bad049292", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.444377] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.444717] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance network_info: |[{"id": "66e43dfe-4e24-438a-8ae6-122bad049292", "address": "fa:16:3e:2e:c4:e8", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e43dfe-4e", "ovs_interfaceid": "66e43dfe-4e24-438a-8ae6-122bad049292", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 567.445557] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:c4:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66e43dfe-4e24-438a-8ae6-122bad049292', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.459420] env[62730]: DEBUG oslo.service.loopingcall [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.460022] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.460266] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1862241e-5546-4ba9-a707-e5bd7bed4c0f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.481394] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.481394] env[62730]: value = "task-4837076" [ 567.481394] env[62730]: _type = "Task" [ 567.481394] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.491584] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837076, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.632491] env[62730]: DEBUG nova.compute.manager [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Received event network-vif-plugged-6df1fa79-5333-40c4-8385-faef7685c678 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 567.632622] env[62730]: DEBUG oslo_concurrency.lockutils [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] Acquiring lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.633068] env[62730]: DEBUG oslo_concurrency.lockutils [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.633068] env[62730]: DEBUG oslo_concurrency.lockutils [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.633192] env[62730]: DEBUG nova.compute.manager [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] No waiting events found dispatching network-vif-plugged-6df1fa79-5333-40c4-8385-faef7685c678 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 567.633402] env[62730]: WARNING nova.compute.manager [req-cdf62aac-edeb-4c87-a4d1-e15c2eb35043 req-8fa50c1c-814a-4a8e-9ee4-581da12b7652 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Received unexpected event network-vif-plugged-6df1fa79-5333-40c4-8385-faef7685c678 for instance with vm_state building and task_state spawning. 
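The paired "Acquiring lock ... by ..." / "acquired ... :: waited 0.000s" / ""released" ... :: held 0.000s" entries above (for the "compute_resources" and "...-events" locks) are emitted by the decorator wrapper in oslo.concurrency's lockutils, which the log cites at lockutils.py:402/407/421. A minimal sketch of that pattern follows, assuming a placeholder function body; it illustrates the library API, not Nova's actual call sites.

# Hedged sketch: the oslo.concurrency pattern behind the
# Acquiring/acquired/released lock lines in this log. The lock name
# 'compute_resources' is taken from the log; the function is a placeholder.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs only while the in-process 'compute_resources' lock is held; the
    # decorator's wrapper logs the acquire (with time waited) and the
    # release (with time held), producing entries like the ones above.
    pass

# The same primitive is also available as a context manager:
with lockutils.lock('compute_resources'):
    pass

By default these are in-process semaphores; passing external=True makes lockutils take a file lock as well, so that separate worker processes serialize on the same name.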
[ 567.993163] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837076, 'name': CreateVM_Task, 'duration_secs': 0.365265} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.993461] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 567.995095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.995095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.995095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 567.995359] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d3c9ce-52fb-426e-aac0-c94df8b5fd7b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.006235] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 568.006235] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5252c7a5-d5fa-7323-bff2-640d766e2aaa" [ 568.006235] env[62730]: _type = "Task" [ 568.006235] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.018275] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5252c7a5-d5fa-7323-bff2-640d766e2aaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.518836] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.519139] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.519363] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.880529] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "986e37d4-d3ae-42a0-8caa-39b92636b973" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.880880] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.275067] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b69749c6-e6f5-42c8-bcc5-415dd681a6f6 tempest-ServersTestFqdnHostnames-351985442 tempest-ServersTestFqdnHostnames-351985442-project-member] Acquiring lock "c62428f6-0693-4ae7-81ae-eacb56821c3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.275499] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b69749c6-e6f5-42c8-bcc5-415dd681a6f6 tempest-ServersTestFqdnHostnames-351985442 tempest-ServersTestFqdnHostnames-351985442-project-member] Lock "c62428f6-0693-4ae7-81ae-eacb56821c3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.090618] env[62730]: DEBUG nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Received event network-changed-6df1fa79-5333-40c4-8385-faef7685c678 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 573.090902] env[62730]: DEBUG nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Refreshing instance network info cache due to event network-changed-6df1fa79-5333-40c4-8385-faef7685c678. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 573.091015] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Acquiring lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.091173] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Acquired lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.091343] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Refreshing network info cache for port 6df1fa79-5333-40c4-8385-faef7685c678 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.971504] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Updated VIF entry in instance network info cache for port 6df1fa79-5333-40c4-8385-faef7685c678. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 573.975099] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Updating instance_info_cache with network_info: [{"id": "6df1fa79-5333-40c4-8385-faef7685c678", "address": "fa:16:3e:da:e8:27", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df1fa79-53", "ovs_interfaceid": "6df1fa79-5333-40c4-8385-faef7685c678", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.991802] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Releasing lock "refresh_cache-2ed97ed9-4e81-484c-9f0e-baa6968b58a4" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.993241] env[62730]: DEBUG nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Received event network-vif-plugged-66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 573.995201] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Acquiring lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.995201] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.995201] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.995201] env[62730]: DEBUG nova.compute.manager 
[req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] No waiting events found dispatching network-vif-plugged-66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 573.995368] env[62730]: WARNING nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Received unexpected event network-vif-plugged-66e43dfe-4e24-438a-8ae6-122bad049292 for instance with vm_state building and task_state spawning. [ 573.995368] env[62730]: DEBUG nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Received event network-changed-66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 573.995368] env[62730]: DEBUG nova.compute.manager [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Refreshing instance network info cache due to event network-changed-66e43dfe-4e24-438a-8ae6-122bad049292. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 573.995758] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Acquiring lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.997201] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Acquired lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.997201] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Refreshing network info cache for port 66e43dfe-4e24-438a-8ae6-122bad049292 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 574.615755] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b57f68fa-2c10-4722-9550-49da67838196 tempest-ServersAdmin275Test-133488872 tempest-ServersAdmin275Test-133488872-project-member] Acquiring lock "2074f279-f5f2-4048-abf5-ee61bd9f5002" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.616150] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b57f68fa-2c10-4722-9550-49da67838196 tempest-ServersAdmin275Test-133488872 tempest-ServersAdmin275Test-133488872-project-member] Lock "2074f279-f5f2-4048-abf5-ee61bd9f5002" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.972749] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] 
Updated VIF entry in instance network info cache for port 66e43dfe-4e24-438a-8ae6-122bad049292. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 574.973128] env[62730]: DEBUG nova.network.neutron [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Updating instance_info_cache with network_info: [{"id": "66e43dfe-4e24-438a-8ae6-122bad049292", "address": "fa:16:3e:2e:c4:e8", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e43dfe-4e", "ovs_interfaceid": "66e43dfe-4e24-438a-8ae6-122bad049292", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.995051] env[62730]: DEBUG oslo_concurrency.lockutils [req-eeb1ff1f-c0af-409a-abc3-a904530687c6 req-53798f1e-69f2-4cd6-b3b0-62f56ec003f9 service nova] Releasing lock "refresh_cache-d90fd82e-a469-41c7-b414-c7eb5554e72a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.488644] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d566888-1b41-4987-845e-df54b014bf72 tempest-VolumesAdminNegativeTest-322510555 tempest-VolumesAdminNegativeTest-322510555-project-member] Acquiring lock "6eab5473-6c72-4bdb-8f84-56de17441f3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.489243] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d566888-1b41-4987-845e-df54b014bf72 tempest-VolumesAdminNegativeTest-322510555 tempest-VolumesAdminNegativeTest-322510555-project-member] Lock "6eab5473-6c72-4bdb-8f84-56de17441f3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.119279] env[62730]: DEBUG oslo_concurrency.lockutils [None req-95b2345a-8f6d-422e-90bc-ad1d343a8754 tempest-AttachInterfacesV270Test-1552484991 tempest-AttachInterfacesV270Test-1552484991-project-member] Acquiring lock "1ac41735-b0b9-428e-8644-13490403d53e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.119534] env[62730]: DEBUG oslo_concurrency.lockutils [None req-95b2345a-8f6d-422e-90bc-ad1d343a8754 
tempest-AttachInterfacesV270Test-1552484991 tempest-AttachInterfacesV270Test-1552484991-project-member] Lock "1ac41735-b0b9-428e-8644-13490403d53e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.975521] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fff3c03-bb4b-450d-8403-c4f4acd0c71c tempest-ServerActionsTestJSON-1709088567 tempest-ServerActionsTestJSON-1709088567-project-member] Acquiring lock "fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.976021] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fff3c03-bb4b-450d-8403-c4f4acd0c71c tempest-ServerActionsTestJSON-1709088567 tempest-ServerActionsTestJSON-1709088567-project-member] Lock "fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.501467] env[62730]: WARNING oslo_vmware.rw_handles [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 584.501467] env[62730]: ERROR oslo_vmware.rw_handles [ 584.502297] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 584.503549] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 
tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 584.503901] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Copying Virtual Disk [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/af2749c4-fc7e-4b60-bc93-db529d656ffa/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 584.504166] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9184a9ae-2cfd-43f1-aa10-75f7495a778f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.514012] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for the task: (returnval){ [ 584.514012] env[62730]: value = "task-4837077" [ 584.514012] env[62730]: _type = "Task" [ 584.514012] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.526330] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Task: {'id': task-4837077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.032866] env[62730]: DEBUG oslo_vmware.exceptions [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 585.032866] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.036238] env[62730]: ERROR nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 585.036238] env[62730]: Faults: ['InvalidArgument'] [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] Traceback (most recent call last): [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] yield resources [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self.driver.spawn(context, instance, image_meta, [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self._fetch_image_if_missing(context, vi) [ 585.036238] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] image_cache(vi, tmp_image_ds_loc) [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] vm_util.copy_virtual_disk( [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] session._wait_for_task(vmdk_copy_task) [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] return self.wait_for_task(task_ref) [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] return evt.wait() [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] result = hub.switch() [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.036673] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] return self.greenlet.switch() [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self.f(*self.args, **self.kw) [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] raise exceptions.translate_fault(task_info.error) [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] Faults: ['InvalidArgument'] [ 585.037105] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] [ 585.037105] env[62730]: INFO nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Terminating instance [ 585.038618] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.038618] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.038779] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2bbb588e-825a-4021-a885-9beaec923435 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.041692] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.042027] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquired lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.042238] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.051276] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.051514] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 585.054282] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d37dad9-cc74-43bc-9a79-b2b2376c839f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.061945] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.062198] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.068416] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Waiting for the task: (returnval){ [ 585.068416] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5253769a-06e3-c9b0-0598-c68e60941379" [ 585.068416] env[62730]: _type = "Task" [ 585.068416] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.078064] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5253769a-06e3-c9b0-0598-c68e60941379, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.084544] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "2a5014e6-835c-45fd-b723-a968782dda58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.084777] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "2a5014e6-835c-45fd-b723-a968782dda58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.120247] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "a897e28b-32bc-4726-ac37-c99dc2efb75d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.120494] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "a897e28b-32bc-4726-ac37-c99dc2efb75d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.121483] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.414502] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.430555] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Releasing lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.431099] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 585.431234] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 585.432380] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c6c2a9-fb25-4019-8b7a-71a0aebe863c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.446398] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 585.446883] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bff04d0-50d1-4064-bd75-6e869256a2ee {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.482571] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 585.482954] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 585.483239] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Deleting the datastore file [datastore2] 4b189162-95ca-4480-82a1-2025371f235a {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 585.483584] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2955e2d7-9e02-4b2c-ae1a-6d762ddb7fad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.492251] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for the task: (returnval){ [ 585.492251] env[62730]: value = "task-4837079" [ 585.492251] env[62730]: _type = "Task" [ 585.492251] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.500775] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Task: {'id': task-4837079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.579070] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 585.579477] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Creating directory with path [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.579595] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db6edfac-a967-4900-b3ce-6700674b8971 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.593109] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Created directory with path [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.593318] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Fetch image to [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 585.593487] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 585.594315] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bacbb52-eeeb-4adc-8b80-2d797d8013a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.602362] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc7cdef-25d5-44ff-99b4-4f149dd69d36 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.616549] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77090e7-e8ee-4471-ad9f-516c97c83057 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.652354] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-537c61b1-c5e8-4897-8eb1-a9f94ecae872 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.659583] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cde5f63a-0182-4cbe-91b3-337224ac579d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.681854] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 585.860340] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 585.924580] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 585.924720] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 586.004103] env[62730]: DEBUG oslo_vmware.api [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Task: {'id': task-4837079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04827} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.004408] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 586.004603] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 586.004776] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 586.005298] env[62730]: INFO nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Took 0.57 seconds to destroy the instance on the hypervisor. [ 586.005565] env[62730]: DEBUG oslo.service.loopingcall [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.005880] env[62730]: DEBUG nova.compute.manager [-] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 586.008923] env[62730]: DEBUG nova.compute.claims [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 586.009174] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.009330] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.461466] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb34743-b76d-4330-9674-c272406f83ec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.473022] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e15dbf-064d-4778-9772-6f6667b2bf10 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.501971] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e205577-eb42-4dfc-b037-5cb4fb0de004 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.512018] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82657109-a3c0-4caa-b714-938a9b6bf4c5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.525805] env[62730]: DEBUG nova.compute.provider_tree [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.536885] env[62730]: DEBUG nova.scheduler.client.report [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.555689] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.556249] env[62730]: ERROR nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 586.556249] env[62730]: Faults: ['InvalidArgument'] [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] Traceback (most recent call last): [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self.driver.spawn(context, instance, image_meta, [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self._fetch_image_if_missing(context, vi) [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] image_cache(vi, tmp_image_ds_loc) [ 586.556249] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] vm_util.copy_virtual_disk( [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] session._wait_for_task(vmdk_copy_task) [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] return self.wait_for_task(task_ref) [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 
4b189162-95ca-4480-82a1-2025371f235a] return evt.wait() [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] result = hub.switch() [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] return self.greenlet.switch() [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 586.556842] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] self.f(*self.args, **self.kw) [ 586.557404] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 586.557404] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] raise exceptions.translate_fault(task_info.error) [ 586.557404] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 586.557404] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] Faults: ['InvalidArgument'] [ 586.557404] env[62730]: ERROR nova.compute.manager [instance: 4b189162-95ca-4480-82a1-2025371f235a] [ 586.557404] env[62730]: DEBUG nova.compute.utils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.559663] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Build of instance 4b189162-95ca-4480-82a1-2025371f235a was re-scheduled: A specified parameter was not correct: fileType [ 586.559663] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 586.560073] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 586.560306] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquiring lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.560459] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Acquired lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.560666] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 586.627801] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.871875] env[62730]: DEBUG nova.network.neutron [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.887069] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Releasing lock "refresh_cache-4b189162-95ca-4480-82a1-2025371f235a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.887069] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 586.887069] env[62730]: DEBUG nova.compute.manager [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] [instance: 4b189162-95ca-4480-82a1-2025371f235a] Skipping network deallocation for instance since networking was not requested. 
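The failure above is the recurring pattern in this log: the sparse image copy is submitted to vCenter as a CopyVirtualDisk task, `_wait_for_task` polls it, and when the task ends in error the fault is translated into a `VimFaultException` ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which unwinds through `spawn()`, aborts the resource claim, and re-schedules the build. A minimal sketch of that poll-and-translate loop, assuming a hypothetical `session.get_task_info()` helper and a simplified error object; the real loop lives in oslo.vmware's `api.py`:

```python
# Simplified poll-and-translate loop; a sketch, not the oslo.vmware code.
import time

class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list          # e.g. ['InvalidArgument']

def wait_for_task(session, task_ref, interval=0.5):
    """Poll a vCenter task until it finishes; raise on error states."""
    while True:
        info = session.get_task_info(task_ref)     # assumed helper
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # Produces the traceback seen above: the task's fault list and
            # message become the VimFaultException payload.
            raise VimFaultException(info.fault_list, info.error_message)
        time.sleep(interval)
```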
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 587.002857] env[62730]: INFO nova.scheduler.client.report [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Deleted allocations for instance 4b189162-95ca-4480-82a1-2025371f235a [ 587.031024] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7464021-5be3-49f9-9d8e-638860cc1cb6 tempest-ServerDiagnosticsV248Test-1524066760 tempest-ServerDiagnosticsV248Test-1524066760-project-member] Lock "4b189162-95ca-4480-82a1-2025371f235a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.268s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.060324] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 587.123029] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.123413] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.124893] env[62730]: INFO nova.compute.claims [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.513951] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88637f2b-b280-4940-aba0-4e4a335d3f8b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.522619] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6b348b-cc99-4710-b956-c7bb191ae5b2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.556010] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d3326c-a4d3-461c-a121-d0544799e205 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.564274] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a349a2e-f176-45f5-832b-451232158759 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.578167] env[62730]: DEBUG nova.compute.provider_tree [None 
req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.588240] env[62730]: DEBUG nova.scheduler.client.report [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.602338] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.479s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.602946] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 587.662348] env[62730]: DEBUG nova.compute.utils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 587.662348] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 587.662348] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 587.673226] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Start building block device mappings for instance. 
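Note how claim bookkeeping on this node is serialized: the earlier `abort_instance_claim` and this `instance_claim` both queue on the single named lock "compute_resources", which is why each claim is bracketed by `waited`/`held` timings in the log. A minimal sketch of the pattern using the real oslo.concurrency decorator; the tracker body is a toy stand-in for nova's ResourceTracker:

```python
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

class MiniResourceTracker:
    """Toy tracker: all mutations go through one named lock."""
    def __init__(self):
        self.used_vcpus = 0

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, vcpus):
        # Runs only while holding "compute_resources", matching the
        # acquired/released pairs in the log.
        self.used_vcpus += vcpus

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def abort_instance_claim(self, vcpus):
        self.used_vcpus -= vcpus
```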
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 587.756554] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 587.797081] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.797337] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.797499] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.797685] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.797835] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.797987] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.800293] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
587.800503] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.800706] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.800904] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.801110] env[62730]: DEBUG nova.virt.hardware [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.801997] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3733aec8-b24b-44b7-b70f-8e7472691457 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.810083] env[62730]: DEBUG nova.policy [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c450a138dff9420583e79af30b0cfeee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e090f6e3fd264211b21d6d8407d12cc7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 587.819119] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94adf54-3e08-4efc-a28b-cf6980f26b2e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.610708] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Successfully created port: bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.959162] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7e304476-8bc0-4d27-b120-7cbb36689689 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "2f233d8c-7e64-433e-82aa-ca4b1b2a1798" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.959461] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-7e304476-8bc0-4d27-b120-7cbb36689689 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "2f233d8c-7e64-433e-82aa-ca4b1b2a1798" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.076952] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Successfully updated port: bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 590.093399] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.093547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquired lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.093694] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 590.186528] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance cache missing network info. 
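The nova.virt.hardware records above are the standard CPU topology search: flavor and image limits of 0 mean "unset", so the 65536 defaults apply, and for a 1-vCPU m1.nano the only product sockets x cores x threads equal to 1 is 1:1:1. A reduced version of that enumeration (the full logic is in nova/virt/hardware.py):

```python
# Enumerate CPU topologies whose sockets*cores*threads == vcpus, capped by
# the (default 65536) limits, mirroring "Build topologies for 1 vcpu(s)
# 1:1:1" -> "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
import collections

VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                         'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```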
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.587989] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Updating instance_info_cache with network_info: [{"id": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "address": "fa:16:3e:ae:59:f9", "network": {"id": "795f94c5-cea0-40b3-bc39-60d5b303e964", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1578054125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e090f6e3fd264211b21d6d8407d12cc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd69886f-b1", "ovs_interfaceid": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.601922] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Releasing lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.602250] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance network_info: |[{"id": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "address": "fa:16:3e:ae:59:f9", "network": {"id": "795f94c5-cea0-40b3-bc39-60d5b303e964", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1578054125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e090f6e3fd264211b21d6d8407d12cc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd69886f-b1", "ovs_interfaceid": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 590.602992] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:59:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd69886f-b190-4b8e-bdb7-9835ca8d5004', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.611301] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Creating folder: Project (e090f6e3fd264211b21d6d8407d12cc7). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.611965] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36ab7605-9787-41b6-9e1a-da3b90a3e1e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.625542] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Created folder: Project (e090f6e3fd264211b21d6d8407d12cc7) in parent group-v942928. [ 590.625889] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Creating folder: Instances. Parent ref: group-v942955. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 590.626210] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4741b7f9-a1c6-41a1-bb6b-18bdc4c29899 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.637442] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Created folder: Instances in parent group-v942955. [ 590.637442] env[62730]: DEBUG oslo.service.loopingcall [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.637599] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 590.637808] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f399d62-c994-4dfe-a016-2f3fe9e6ff4b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.658878] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.658878] env[62730]: value = "task-4837082" [ 590.658878] env[62730]: _type = "Task" [ 590.658878] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.668024] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837082, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.170922] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837082, 'name': CreateVM_Task, 'duration_secs': 0.367506} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.171303] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 591.171946] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.172130] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.172442] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 591.172693] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bacb71a-2292-49d0-b168-498685f7c8f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.178988] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for the task: (returnval){ [ 591.178988] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52cdba8b-2f5e-c24f-95c5-e573587facdb" [ 591.178988] env[62730]: _type = "Task" [ 591.178988] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.188968] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52cdba8b-2f5e-c24f-95c5-e573587facdb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.213629] env[62730]: DEBUG nova.compute.manager [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Received event network-vif-plugged-bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 591.213994] env[62730]: DEBUG oslo_concurrency.lockutils [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] Acquiring lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.214342] env[62730]: DEBUG oslo_concurrency.lockutils [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.214695] env[62730]: DEBUG oslo_concurrency.lockutils [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.214850] env[62730]: DEBUG nova.compute.manager [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] No waiting events found dispatching network-vif-plugged-bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 591.215120] env[62730]: WARNING nova.compute.manager [req-694fe9ce-0d20-4926-aa32-2026b45c5de1 req-5518ba39-588b-40f5-b799-48b132ba89f5 service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Received unexpected event network-vif-plugged-bd69886f-b190-4b8e-bdb7-9835ca8d5004 for instance with vm_state building and task_state spawning. 
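The network-vif-plugged warning above is the external-event handshake: Neutron posts the event to Nova's API, the compute manager looks up a registered waiter for the (instance, event) pair, and since the spawn path has not registered one yet the pop finds nothing and the event is logged as unexpected. An illustrative stand-in for nova.compute.manager.InstanceEvents, with names and locking simplified:

```python
import threading

class InstanceEvents:
    """Per-instance event waiters keyed by (uuid, event-name)."""
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # (uuid, name) -> threading.Event

    def prepare_for_event(self, uuid, name):
        with self._lock:
            evt = threading.Event()
            self._waiters[(uuid, name)] = evt
            return evt

    def pop_instance_event(self, uuid, name):
        with self._lock:
            return self._waiters.pop((uuid, name), None)

events = InstanceEvents()
if events.pop_instance_event('1e4cc6b4', 'network-vif-plugged') is None:
    # Matches "No waiting events found dispatching ..." followed by the
    # "Received unexpected event ..." WARNING above.
    print('Received unexpected event')
```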
[ 591.689686] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.690041] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.690736] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.086532] env[62730]: DEBUG nova.compute.manager [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Received event network-changed-bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 595.086802] env[62730]: DEBUG nova.compute.manager [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Refreshing instance network info cache due to event network-changed-bd69886f-b190-4b8e-bdb7-9835ca8d5004. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 595.087091] env[62730]: DEBUG oslo_concurrency.lockutils [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] Acquiring lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.087180] env[62730]: DEBUG oslo_concurrency.lockutils [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] Acquired lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.087372] env[62730]: DEBUG nova.network.neutron [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Refreshing network info cache for port bd69886f-b190-4b8e-bdb7-9835ca8d5004 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 595.570354] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "91052772-87d4-4fb3-b590-f071c0419196" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.570625] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.156043] env[62730]: DEBUG nova.network.neutron [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Updated VIF entry in instance network info cache for port bd69886f-b190-4b8e-bdb7-9835ca8d5004. 
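The network-changed handling here follows the same locking discipline as the build path: the handler takes the instance's refresh_cache lock, re-reads the port from Neutron, and rewrites the matching entry in instance_info_cache, which is why the identical network_info blob appears in the log again. A hedged sketch, assuming a `get_port_details()` Neutron-client helper and a nova-object-style `save()`:

```python
from oslo_concurrency import lockutils

def handle_network_changed(context, instance, port_id, neutron):
    """Refresh one VIF entry in the instance network info cache."""
    with lockutils.lock('refresh_cache-%s' % instance.uuid):
        port = neutron.get_port_details(context, port_id)   # assumed helper
        kept = [vif for vif in instance.info_cache.network_info
                if vif['id'] != port_id]
        instance.info_cache.network_info = kept + [port]
        instance.info_cache.save()      # persist the updated cache
```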
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 596.156330] env[62730]: DEBUG nova.network.neutron [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Updating instance_info_cache with network_info: [{"id": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "address": "fa:16:3e:ae:59:f9", "network": {"id": "795f94c5-cea0-40b3-bc39-60d5b303e964", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1578054125-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e090f6e3fd264211b21d6d8407d12cc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd69886f-b1", "ovs_interfaceid": "bd69886f-b190-4b8e-bdb7-9835ca8d5004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.176179] env[62730]: DEBUG oslo_concurrency.lockutils [req-de6f3b10-5220-4991-9238-27ce294a71f0 req-27e90f28-06b1-4945-b43b-f75cff500c8e service nova] Releasing lock "refresh_cache-1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.529958] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d7175b12-7b3e-4114-a82f-b418d676df40 tempest-ServerGroupTestJSON-1801781332 tempest-ServerGroupTestJSON-1801781332-project-member] Acquiring lock "c1658258-9147-431c-9e6d-5f8360523c23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.529958] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d7175b12-7b3e-4114-a82f-b418d676df40 tempest-ServerGroupTestJSON-1801781332 tempest-ServerGroupTestJSON-1801781332-project-member] Lock "c1658258-9147-431c-9e6d-5f8360523c23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.363414] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.398496] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.398695] env[62730]: DEBUG oslo_service.periodic_task [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.400486] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.400486] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.737209] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.737398] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 603.737516] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 603.771833] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.772155] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.774648] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.774716] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.775562] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.775876] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Skipping network cache update for instance because it is Building. 
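The burst of `Running periodic task` records comes from oslo.service's periodic task machinery: decorated methods on the compute manager are all driven off one timer loop. A minimal sketch using the real decorator, with the heal logic reduced to the "skip while Building" check the log shows; the two helper methods are hypothetical:

```python
from oslo_service import periodic_task

class MiniComputeManager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        for inst in self._instances_on_host(context):   # assumed helper
            if inst.vm_state == 'building':
                # "Skipping network cache update for instance because it
                # is Building."
                continue
            self._refresh_network_cache(context, inst)  # assumed helper
```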
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.776164] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.776435] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.776681] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.776922] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 603.777169] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 603.779023] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.790966] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.791291] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.791373] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.791515] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 603.792624] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b558a9a6-fe83-4a0b-b377-a9e3a7662d09 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.805424] 
env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c84bb0f-2c87-4a6e-acd7-e704a379eb62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.821744] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65bbbe3-8dc9-4e39-9a33-b4d6f51a9599 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.830932] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb1d927-1731-407b-bc5d-96be42cc14fa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.869562] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180538MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 603.871043] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.871043] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.979050] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979050] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979050] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 736075f4-302b-4b1a-9358-7fe2fb73a36f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979050] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979398] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979398] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16f7dfdb-2063-4992-9f40-4b332006940f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979398] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979398] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979536] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 603.979536] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 604.014319] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.045752] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.065844] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.078837] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c62428f6-0693-4ae7-81ae-eacb56821c3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.094388] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2074f279-f5f2-4048-abf5-ee61bd9f5002 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.115018] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6eab5473-6c72-4bdb-8f84-56de17441f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.125970] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1ac41735-b0b9-428e-8644-13490403d53e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.140028] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.151778] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.163663] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a5014e6-835c-45fd-b723-a968782dda58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.178999] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a897e28b-32bc-4726-ac37-c99dc2efb75d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.192341] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2f233d8c-7e64-433e-82aa-ca4b1b2a1798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.211218] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.228094] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1658258-9147-431c-9e6d-5f8360523c23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 604.228454] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 604.228865] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_cf586645a56d4614a40b01d686d0c8bc': '1', 'io_workload': '10', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_42256023b89344de90ced8c51fd48cf6': '1', 'num_proj_b825311d36404f199e86101b21b30ad5': '1', 'num_proj_76b57f69c45049f4b76e1ea4c1f78513': '1', 'num_proj_cf705f506bcc4409881416d80a745afc': '2', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 604.687129] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1216018-ef7d-47f6-91c5-9275e72d547b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.694248] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6185201c-27ef-4209-97ba-7512599d53cd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.733797] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6db89a9-b774-4111-b27a-962cd3ae31b4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.742816] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d71f5b-0b87-4c14-a046-3087b274d2c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.758105] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.775651] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.795976] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 604.796566] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.926s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.755541] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.756274] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.756274] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.756274] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 606.486128] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e937e9cd-b87a-4928-b59f-c4a5024e2ad3 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "9d39c196-4ab4-4a97-9c82-44b4a4b107a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.487642] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e937e9cd-b87a-4928-b59f-c4a5024e2ad3 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "9d39c196-4ab4-4a97-9c82-44b4a4b107a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.016352] env[62730]: DEBUG oslo_concurrency.lockutils [None req-56821f9e-77d9-42b7-8043-a3b68677cfbe tempest-ServerMetadataTestJSON-1845498096 tempest-ServerMetadataTestJSON-1845498096-project-member] Acquiring lock "b7273f62-c330-4b6a-a6e3-39d76c46aac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.017051] env[62730]: DEBUG oslo_concurrency.lockutils [None req-56821f9e-77d9-42b7-8043-a3b68677cfbe tempest-ServerMetadataTestJSON-1845498096 tempest-ServerMetadataTestJSON-1845498096-project-member] Lock "b7273f62-c330-4b6a-a6e3-39d76c46aac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.969682] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-18c2580f-111a-47f9-a656-a0a19dd93958 tempest-ServerExternalEventsTest-1788746039 tempest-ServerExternalEventsTest-1788746039-project-member] Acquiring lock "1dc56d8a-02e3-4441-9bb5-f091ecac835a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.969682] env[62730]: DEBUG oslo_concurrency.lockutils [None req-18c2580f-111a-47f9-a656-a0a19dd93958 tempest-ServerExternalEventsTest-1788746039 tempest-ServerExternalEventsTest-1788746039-project-member] Lock "1dc56d8a-02e3-4441-9bb5-f091ecac835a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.579371] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a599fe64-53d8-4a9b-a264-5b4b276cf7e1 tempest-ServersNegativeTestMultiTenantJSON-1706258361 tempest-ServersNegativeTestMultiTenantJSON-1706258361-project-member] Acquiring lock "d8656d64-907d-4524-905a-aa67a4ad1f63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.579726] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a599fe64-53d8-4a9b-a264-5b4b276cf7e1 tempest-ServersNegativeTestMultiTenantJSON-1706258361 tempest-ServersNegativeTestMultiTenantJSON-1706258361-project-member] Lock "d8656d64-907d-4524-905a-aa67a4ad1f63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.015487] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8d294b59-eedf-4a43-9926-42699b612df4 tempest-ServerAddressesTestJSON-2073240203 tempest-ServerAddressesTestJSON-2073240203-project-member] Acquiring lock "6d8cad34-699c-4dcc-8f83-e21490f82b8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.015892] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8d294b59-eedf-4a43-9926-42699b612df4 tempest-ServerAddressesTestJSON-2073240203 tempest-ServerAddressesTestJSON-2073240203-project-member] Lock "6d8cad34-699c-4dcc-8f83-e21490f82b8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.031736] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d887d11b-fd41-49f2-86d2-73e73b305897 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Acquiring lock "cb5b5e10-41e5-497e-b409-0a83fa00896c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.032092] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d887d11b-fd41-49f2-86d2-73e73b305897 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock 
"cb5b5e10-41e5-497e-b409-0a83fa00896c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.816740] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c60a7df4-bcf2-48f7-9bb6-8ba782782026 tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Acquiring lock "91907341-29ff-42b3-a25f-a9e990af4de1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.816740] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c60a7df4-bcf2-48f7-9bb6-8ba782782026 tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Lock "91907341-29ff-42b3-a25f-a9e990af4de1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.496808] env[62730]: WARNING oslo_vmware.rw_handles [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 634.496808] env[62730]: ERROR oslo_vmware.rw_handles [ 634.497433] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 634.498730] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Caching image {{(pid=62730) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 634.498978] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Copying Virtual Disk [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/144f4b2b-6227-4030-a5b1-9114252fcd41/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 634.499333] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89faacae-f9b4-484d-9d64-a45534864bde {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.508829] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Waiting for the task: (returnval){ [ 634.508829] env[62730]: value = "task-4837083" [ 634.508829] env[62730]: _type = "Task" [ 634.508829] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.517896] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Task: {'id': task-4837083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.021637] env[62730]: DEBUG oslo_vmware.exceptions [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 635.021935] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.022529] env[62730]: ERROR nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 635.022529] env[62730]: Faults: ['InvalidArgument'] [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Traceback (most recent call last): [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] yield resources [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self.driver.spawn(context, instance, image_meta, [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self._fetch_image_if_missing(context, vi) [ 635.022529] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] image_cache(vi, tmp_image_ds_loc) [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] vm_util.copy_virtual_disk( [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] session._wait_for_task(vmdk_copy_task) [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return self.wait_for_task(task_ref) [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return evt.wait() [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] result = hub.switch() [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.023046] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return self.greenlet.switch() [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self.f(*self.args, **self.kw) [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] raise exceptions.translate_fault(task_info.error) [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Faults: ['InvalidArgument'] [ 635.024441] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] [ 635.024441] env[62730]: INFO nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Terminating instance [ 635.024826] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.024991] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.029465] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 
tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 635.029737] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 635.030019] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dba3155b-cf4e-4cf1-91a6-39703a8d6239 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.033156] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5db966-09d9-4386-bc8a-b12efd4019bb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.046192] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 635.050845] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-992e51f7-c95c-4b30-b40f-ece8fbea7742 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.052730] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.052824] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 635.053920] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b74d1f66-5432-4206-8431-021e45f0a8f1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.061049] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 635.061049] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]528b2b63-f0bb-ab50-c0d6-cb8f3c9eab50" [ 635.061049] env[62730]: _type = "Task" [ 635.061049] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.070908] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]528b2b63-f0bb-ab50-c0d6-cb8f3c9eab50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.126042] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 635.126042] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 635.126042] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Deleting the datastore file [datastore2] 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.126042] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f202c058-db3c-4462-b1eb-cd1292e6220a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.132846] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Waiting for the task: (returnval){ [ 635.132846] env[62730]: value = "task-4837085" [ 635.132846] env[62730]: _type = "Task" [ 635.132846] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.142181] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Task: {'id': task-4837085, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.574273] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 635.574597] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating directory with path [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.575244] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57f5c016-c70c-4424-b2e1-745854ef9d0f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.643851] env[62730]: DEBUG oslo_vmware.api [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Task: {'id': task-4837085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083664} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.644205] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.644415] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 635.644591] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 635.644768] env[62730]: INFO nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Took 0.62 seconds to destroy the instance on the hypervisor. 
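The failure sequence above — a CopyVirtualDisk_Task polled through wait_for_task, the task erroring out, and the raw fault translated into VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) — is the standard oslo.vmware poll-and-translate pattern visible throughout this log. The following is a minimal, self-contained sketch of that pattern; FakeTask and VimFault here are illustrative stand-ins, not oslo.vmware's actual classes or signatures:

    # Hedged sketch of the poll-until-done loop that wait_for_task/_poll_task
    # perform in the log above. FakeTask and VimFault are hypothetical
    # stand-ins for vCenter's Task object and oslo.vmware's fault exception.
    import time

    class VimFault(Exception):
        def __init__(self, msg, faults):
            super().__init__(msg)
            self.faults = faults  # e.g. ['InvalidArgument']

    class FakeTask:
        """Stand-in for a vCenter task: yields (state, payload) snapshots."""
        def __init__(self, states):
            self._states = iter(states)

        def info(self):
            return next(self._states)

    def wait_for_task(task, interval=0.01):
        # Poll task state until success or error, mirroring the
        # "progress is 0%" ... "completed successfully" entries above.
        while True:
            state, payload = task.info()
            if state == "success":
                return payload
            if state == "error":
                # Corresponds to the "Fault InvalidArgument not matched"
                # step: the raw fault becomes a typed exception here.
                raise VimFault(payload["msg"], payload["faults"])
            time.sleep(interval)

    # A task failing the way task-4837083 does in the log above:
    bad = FakeTask([
        ("running", None),
        ("error", {"msg": "A specified parameter was not correct: fileType",
                   "faults": ["InvalidArgument"]}),
    ])
    try:
        wait_for_task(bad)
    except VimFault as e:
        print("task failed:", e, "faults:", e.faults)

In the traceback above the real loop runs under an eventlet loopingcall, and the translated fault propagates out of nova.virt.vmwareapi.vm_util.copy_virtual_disk through _fetch_image_if_missing and spawn — which is why the build aborts, the instance is destroyed on the hypervisor, and the compute claim is subsequently rolled back.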
[ 635.649313] env[62730]: DEBUG nova.compute.claims [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 635.649313] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.649313] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.656806] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created directory with path [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.657026] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Fetch image to [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 635.657211] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 635.657995] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d6a5dc-567f-4d78-9d25-f157f8158420 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.665399] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d13231-0bcd-4d81-ba13-636721cc315d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.676859] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ade0d7-af88-4183-9908-52b9bfc8100b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.713717] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427904a4-3ab7-4360-b872-cdffc3143562 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.721103] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6591f5eb-037d-4b68-93e1-86be7df38350 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.744863] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 635.824394] env[62730]: DEBUG oslo_vmware.rw_handles [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 635.891611] env[62730]: DEBUG oslo_vmware.rw_handles [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 635.891611] env[62730]: DEBUG oslo_vmware.rw_handles [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 636.288017] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dfc2e5-b4fa-42c1-bd06-7c382d4bd453 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.295231] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa99e50c-504d-4279-8822-52dea6c566c7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.327748] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be493003-ae26-4dbd-ba51-bd6f993b48ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.336587] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2e57b2-85de-4ed3-9f9d-e61aedfd969a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.351853] env[62730]: DEBUG nova.compute.provider_tree [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.366182] env[62730]: DEBUG nova.scheduler.client.report [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.385687] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.738s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.386662] env[62730]: ERROR nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.386662] env[62730]: Faults: ['InvalidArgument'] [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Traceback (most recent call last): [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self.driver.spawn(context, instance, image_meta, [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self._fetch_image_if_missing(context, vi) [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] image_cache(vi, tmp_image_ds_loc) [ 636.386662] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] vm_util.copy_virtual_disk( [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] session._wait_for_task(vmdk_copy_task) [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return self.wait_for_task(task_ref) [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return evt.wait() [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] result = hub.switch() [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] return self.greenlet.switch() [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 636.387009] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] self.f(*self.args, **self.kw) [ 636.387279] env[62730]: ERROR nova.compute.manager [instance: 
1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 636.387279] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] raise exceptions.translate_fault(task_info.error) [ 636.387279] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 636.387279] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Faults: ['InvalidArgument'] [ 636.387279] env[62730]: ERROR nova.compute.manager [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] [ 636.387473] env[62730]: DEBUG nova.compute.utils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.392498] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Build of instance 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5 was re-scheduled: A specified parameter was not correct: fileType [ 636.392498] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 636.392941] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 636.393145] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 636.394664] env[62730]: DEBUG nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 636.394664] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 637.719541] env[62730]: DEBUG nova.network.neutron [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.738819] env[62730]: INFO nova.compute.manager [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] [instance: 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5] Took 1.35 seconds to deallocate network for instance. [ 637.895621] env[62730]: INFO nova.scheduler.client.report [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Deleted allocations for instance 1ffe728f-e01d-4fbc-9e67-1c4868bae8e5 [ 637.936737] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ef60d15-ce4d-48ce-8709-4217bb8eb9db tempest-ServerDiagnosticsNegativeTest-408414205 tempest-ServerDiagnosticsNegativeTest-408414205-project-member] Lock "1ffe728f-e01d-4fbc-9e67-1c4868bae8e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.770s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.952057] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 638.034342] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.034614] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.037816] env[62730]: INFO nova.compute.claims [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.373115] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.373629] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.653031] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6039081e-1841-4e6d-9261-15fc9720f2a6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.661460] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9769a3-ae66-4d57-a0d2-e64c62498c8c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.705764] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ca5530-b573-4e00-b798-ffed56716ab4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.714076] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8d5dbc-5cf9-4900-a359-6d4ad2b606b2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.727926] env[62730]: DEBUG nova.compute.provider_tree [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 
tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.743016] env[62730]: DEBUG nova.scheduler.client.report [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 638.761189] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.726s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.761807] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 638.822318] env[62730]: DEBUG nova.compute.utils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.825403] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 638.825473] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.836164] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 638.920146] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 638.950924] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:09:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='339393130',id=21,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-1702718897',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.951275] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.951470] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.951925] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.952161] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.952361] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.952617] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.953018] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.953262] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.954049] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.954049] env[62730]: DEBUG nova.virt.hardware [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.954931] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02a4965-410d-4afc-80d7-029f813a9709 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.964933] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b162ff17-d95e-4c70-93b5-7d98faa2a657 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.993312] env[62730]: DEBUG nova.policy [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '672a107ea2ac47d3922e9296e548a835', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '534bb3f3d10946c8a3b9d3100be143cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 640.068282] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Successfully created port: 2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.529866] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89c0b534-6c53-4d11-9023-c0941cd4b1b8 tempest-ListServerFiltersTestJSON-1817198581 
tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "4cefd92c-8058-4e3e-a175-4807a84e0b3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.532948] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89c0b534-6c53-4d11-9023-c0941cd4b1b8 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "4cefd92c-8058-4e3e-a175-4807a84e0b3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.294483] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Successfully updated port: 2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.302862] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.303073] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquired lock "refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.303262] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 641.378946] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance cache missing network info. 
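The Acquiring/Acquired/Releasing lines around "refresh_cache-<uuid>" come from oslo.concurrency: the per-instance lock name serializes cache rebuilds for one instance without blocking others. A minimal sketch of the same pattern using oslo_concurrency.lockutils directly (Nova wraps this in its own helpers; fetch_from_neutron and cache are illustrative stand-ins):

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, fetch_from_neutron, cache):
    # Lock name mirrors the "refresh_cache-<uuid>" entries above, so only
    # one worker rebuilds a given instance's network info at a time.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        cache[instance_uuid] = fetch_from_neutron(instance_uuid)
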
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.734634] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3f699b23-b687-4fdd-a972-63acb361c320 tempest-ServersNegativeTestJSON-202746956 tempest-ServersNegativeTestJSON-202746956-project-member] Acquiring lock "45bb8da2-c544-4935-a3a8-62305e599c06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.734839] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3f699b23-b687-4fdd-a972-63acb361c320 tempest-ServersNegativeTestJSON-202746956 tempest-ServersNegativeTestJSON-202746956-project-member] Lock "45bb8da2-c544-4935-a3a8-62305e599c06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.770153] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Updating instance_info_cache with network_info: [{"id": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "address": "fa:16:3e:cd:2b:91", "network": {"id": "94e0eba5-b129-4a06-b519-6553ecf9d2c7", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1282838165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "534bb3f3d10946c8a3b9d3100be143cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca16ba9-77", "ovs_interfaceid": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.792312] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Releasing lock "refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.792593] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance network_info: |[{"id": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "address": "fa:16:3e:cd:2b:91", "network": {"id": "94e0eba5-b129-4a06-b519-6553ecf9d2c7", "bridge": "br-int", "label": 
"tempest-ServersWithSpecificFlavorTestJSON-1282838165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "534bb3f3d10946c8a3b9d3100be143cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca16ba9-77", "ovs_interfaceid": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 641.792975] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:2b:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ca16ba9-77e6-4af2-a57b-aed466ebab92', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.800766] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Creating folder: Project (534bb3f3d10946c8a3b9d3100be143cf). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 641.802020] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff3d19bd-99e2-4588-841f-6430a9524ad7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.813091] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Created folder: Project (534bb3f3d10946c8a3b9d3100be143cf) in parent group-v942928. [ 641.813293] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Creating folder: Instances. Parent ref: group-v942958. 
{{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 641.813537] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcb2b1fb-d900-4c32-8dbe-4cf05a0fa92e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.824193] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Created folder: Instances in parent group-v942958. [ 641.824193] env[62730]: DEBUG oslo.service.loopingcall [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.824329] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 641.824499] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74378fd4-c1ed-4530-aa18-6eb2cf435bb4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.848981] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.848981] env[62730]: value = "task-4837088" [ 641.848981] env[62730]: _type = "Task" [ 641.848981] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.858324] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837088, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.361651] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837088, 'name': CreateVM_Task, 'duration_secs': 0.318689} completed successfully. 
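Task task-4837088 above follows the standard oslo.vmware pattern: submit a vSphere task, then poll it until it reports success or error. A simplified illustration of that loop (the real wait_for_task in oslo.vmware drives this from a looping call and parses TaskInfo; get_task_info here is a hypothetical accessor):

import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    # Poll until the vSphere task leaves the queued/running states, matching
    # the "progress is 0%" then "completed successfully" lines above.
    while True:
        info = session.get_task_info(task_ref)  # hypothetical accessor
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error)
        time.sleep(poll_interval)
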
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.361952] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 642.362547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.362717] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.363045] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.363386] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6058f72c-e3bf-46ac-b2ce-03c20f80a6f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.368774] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for the task: (returnval){ [ 642.368774] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525927a6-5c0f-d3a4-71e7-36e13617855f" [ 642.368774] env[62730]: _type = "Task" [ 642.368774] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.377452] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525927a6-5c0f-d3a4-71e7-36e13617855f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.414395] env[62730]: DEBUG nova.compute.manager [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Received event network-vif-plugged-2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 642.414395] env[62730]: DEBUG oslo_concurrency.lockutils [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] Acquiring lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.414395] env[62730]: DEBUG oslo_concurrency.lockutils [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.414395] env[62730]: DEBUG oslo_concurrency.lockutils [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.414531] env[62730]: DEBUG nova.compute.manager [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] No waiting events found dispatching network-vif-plugged-2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 642.414952] env[62730]: WARNING nova.compute.manager [req-9fabea44-1f9a-4a1e-9ad2-317b4ac2b033 req-8373277a-a440-4f86-99de-3142a689a7ee service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Received unexpected event network-vif-plugged-2ca16ba9-77e6-4af2-a57b-aed466ebab92 for instance with vm_state building and task_state spawning. 
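The WARNING just above is the benign race in Nova's external-event handshake: Neutron reported network-vif-plugged before the driver registered a waiter, so pop_instance_event found nothing to dispatch. A simplified model of that handshake (threading.Event standing in for Nova's eventlet-based events, keyed by instance and event name):

import threading

_events = {}
_events_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    # Driver side: register interest before triggering the plug.
    ev = threading.Event()
    with _events_lock:
        _events[(instance_uuid, event_name)] = ev
    return ev

def pop_instance_event(instance_uuid, event_name):
    # Event side: wake the waiter, or warn if nobody registered yet.
    with _events_lock:
        ev = _events.pop((instance_uuid, event_name), None)
    if ev is None:
        print('Received unexpected event %s' % event_name)  # the WARNING case
    else:
        ev.set()
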
[ 642.507896] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4df8e47e-4873-4a79-bbb1-ce4b48a512db tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "0e7106c5-fca6-4d97-a6dd-f0670ca42202" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.508188] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4df8e47e-4873-4a79-bbb1-ce4b48a512db tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "0e7106c5-fca6-4d97-a6dd-f0670ca42202" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.885561] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.885905] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.886191] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.647975] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e95cd872-f602-4f1d-a989-8ca7023305c6 tempest-TenantUsagesTestJSON-1260001862 tempest-TenantUsagesTestJSON-1260001862-project-member] Acquiring lock "3d3b40de-1123-44ed-b241-746731c3097c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.650473] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e95cd872-f602-4f1d-a989-8ca7023305c6 tempest-TenantUsagesTestJSON-1260001862 tempest-TenantUsagesTestJSON-1260001862-project-member] Lock "3d3b40de-1123-44ed-b241-746731c3097c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.911085] env[62730]: DEBUG nova.compute.manager [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Received event network-changed-2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11210}} [ 644.911085] env[62730]: DEBUG nova.compute.manager [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Refreshing instance network info cache due to event network-changed-2ca16ba9-77e6-4af2-a57b-aed466ebab92. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 644.911085] env[62730]: DEBUG oslo_concurrency.lockutils [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] Acquiring lock "refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.911085] env[62730]: DEBUG oslo_concurrency.lockutils [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] Acquired lock "refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.911085] env[62730]: DEBUG nova.network.neutron [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Refreshing network info cache for port 2ca16ba9-77e6-4af2-a57b-aed466ebab92 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 645.440144] env[62730]: DEBUG nova.network.neutron [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Updated VIF entry in instance network info cache for port 2ca16ba9-77e6-4af2-a57b-aed466ebab92. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 645.440446] env[62730]: DEBUG nova.network.neutron [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Updating instance_info_cache with network_info: [{"id": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "address": "fa:16:3e:cd:2b:91", "network": {"id": "94e0eba5-b129-4a06-b519-6553ecf9d2c7", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1282838165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "534bb3f3d10946c8a3b9d3100be143cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca16ba9-77", "ovs_interfaceid": "2ca16ba9-77e6-4af2-a57b-aed466ebab92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.451336] env[62730]: DEBUG oslo_concurrency.lockutils [req-a36606b0-e6f2-41f4-b550-72aa6f0fe24e req-1ec6bb63-b179-47f3-8610-c53d823de4fc service nova] Releasing lock 
"refresh_cache-cbdca8b1-7929-4d2c-860c-2b74826d1d11" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.859128] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a4512d4-ea29-48df-87a9-c405e7224534 tempest-ServerTagsTestJSON-360681645 tempest-ServerTagsTestJSON-360681645-project-member] Acquiring lock "7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.859422] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a4512d4-ea29-48df-87a9-c405e7224534 tempest-ServerTagsTestJSON-360681645 tempest-ServerTagsTestJSON-360681645-project-member] Lock "7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.737657] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 663.737994] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 663.737994] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.737345] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.737626] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.737784] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 664.738025] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 664.752283] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.752514] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.752686] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.752843] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 664.753979] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837048e1-f909-4bfe-8aaf-2249d985ec54 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.763057] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9e5038-2423-415e-a8e4-959d8ee86631 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.778230] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ce37c2-b94a-4413-a737-3bf643ddfeeb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.785973] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03122509-8e0a-49d1-915d-a253858edc83 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.817437] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180517MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 664.817604] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.817810] 
env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.897700] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.897898] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 736075f4-302b-4b1a-9358-7fe2fb73a36f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898315] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898315] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898452] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16f7dfdb-2063-4992-9f40-4b332006940f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898597] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898735] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898851] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.898975] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.899108] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 664.911944] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.922956] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.934707] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c62428f6-0693-4ae7-81ae-eacb56821c3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.947316] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2074f279-f5f2-4048-abf5-ee61bd9f5002 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.958650] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6eab5473-6c72-4bdb-8f84-56de17441f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.970429] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1ac41735-b0b9-428e-8644-13490403d53e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.985452] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 664.997939] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.008265] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a5014e6-835c-45fd-b723-a968782dda58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.019527] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a897e28b-32bc-4726-ac37-c99dc2efb75d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.030695] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2f233d8c-7e64-433e-82aa-ca4b1b2a1798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.045015] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.056548] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1658258-9147-431c-9e6d-5f8360523c23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.067628] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9d39c196-4ab4-4a97-9c82-44b4a4b107a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.079232] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b7273f62-c330-4b6a-a6e3-39d76c46aac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.091032] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1dc56d8a-02e3-4441-9bb5-f091ecac835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.103773] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8656d64-907d-4524-905a-aa67a4ad1f63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.114837] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6d8cad34-699c-4dcc-8f83-e21490f82b8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.125956] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cb5b5e10-41e5-497e-b409-0a83fa00896c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.138056] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91907341-29ff-42b3-a25f-a9e990af4de1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.148450] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.158991] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4cefd92c-8058-4e3e-a175-4807a84e0b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.169632] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 45bb8da2-c544-4935-a3a8-62305e599c06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.180519] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0e7106c5-fca6-4d97-a6dd-f0670ca42202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.190936] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3d3b40de-1123-44ed-b241-746731c3097c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.202193] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
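The long run of heal-skip messages above is the allocation-reconciliation pass: for every allocation placement holds against this node, the tracker keeps it if the instance is actively managed here, leaves it alone if the instance is only scheduled and has yet to start, and would remove it only if the instance is gone entirely. A hypothetical simplification (the callback and dict shapes are illustrative, not Nova's signatures):

def reconcile_allocations(allocations, tracked_instances, delete_allocation):
    # allocations: {instance_uuid: resources} placement holds for this node
    # tracked_instances: {instance_uuid: host_or_None} known to this compute
    # delete_allocation: callback removing a stale allocation (illustrative)
    for uuid in allocations:
        if uuid not in tracked_instances:
            delete_allocation(uuid)  # instance is gone: allocation is stale
        elif tracked_instances[uuid] is None:
            pass  # scheduled here but yet to start: skip heal, as logged
        else:
            pass  # actively managed on this host: allocation is expected

The final resource view that follows tallies the same bookkeeping: used_ram=1792MB is the 512MB reserved plus the ten tracked instances at 128MB each, and the ten allocated vcpus are one per instance.
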
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 665.202429] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 665.202592] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'io_workload': '10', 'num_proj_42256023b89344de90ced8c51fd48cf6': '1', 'num_proj_b825311d36404f199e86101b21b30ad5': '1', 'num_proj_76b57f69c45049f4b76e1ea4c1f78513': '1', 'num_proj_cf705f506bcc4409881416d80a745afc': '2', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 665.673228] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc4275d-2cd5-46fa-86b0-e8650b50ad00 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.681567] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999fcd40-be96-4249-a1f5-848b7e10b55f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.713472] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80f7251-5728-43d1-9285-2411713f0450 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.723424] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25afbf9c-88b7-47b2-86c9-da02e73cba7c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.739069] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.747967] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.763120] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 665.763312] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.945s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.763647] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.763962] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.764053] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 666.764183] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 666.785020] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785227] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785354] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785490] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785618] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785741] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. 
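The periodic info-cache heal above walks the host's instances and skips any that are still Building, since refreshing the network cache of an instance whose ports are still being allocated would race with the ongoing build. A minimal sketch of that selection logic with hypothetical names (Nova's actual implementation lives in ComputeManager._heal_instance_info_cache):

def instances_to_heal(instances, skip_states=("building",)):
    # Mirrors the "Skipping network cache update ... it is Building." lines.
    for inst in instances:
        if inst.get("vm_state") in skip_states:
            continue
        yield inst

# Usage: for inst in instances_to_heal(host_instances): refresh_cache(inst)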
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.785863] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.786016] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.786468] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.786629] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 666.786758] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 666.787351] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.514033] env[62730]: WARNING oslo_vmware.rw_handles [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 684.514033] env[62730]: ERROR oslo_vmware.rw_handles [ 684.514761] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 
tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 684.515953] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 684.516208] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Copying Virtual Disk [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/dddeab17-d2bc-4764-beb5-5a82efbf52e1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 684.516488] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76ac4232-c1ef-4767-9ae2-f70ad9abd6f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.525358] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 684.525358] env[62730]: value = "task-4837089" [ 684.525358] env[62730]: _type = "Task" [ 684.525358] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.533516] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': task-4837089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.035252] env[62730]: DEBUG oslo_vmware.exceptions [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Fault InvalidArgument not matched. 
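The CopyVirtualDisk_Task above is driven by oslo.vmware's wait_for_task, which polls the vim TaskInfo until the task succeeds or errors and then translates the fault ("Fault InvalidArgument not matched" means no specific exception class was registered for that fault name, so the generic VimFaultException is raised instead). A plain-Python sketch of that polling contract, with hypothetical names; the real version polls through a looping call rather than time.sleep:

import time

class TaskError(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5):
    # get_task_info is assumed to return an object with .state, .progress
    # and .error, mirroring the vim TaskInfo polled in the log above.
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # oslo.vmware looks up a specific exception class for the fault
            # here; unknown faults fall back to a generic VimFaultException.
            raise TaskError(info.error)
        time.sleep(interval)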
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 685.035584] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.036154] env[62730]: ERROR nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 685.036154] env[62730]: Faults: ['InvalidArgument'] [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Traceback (most recent call last): [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] yield resources [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self.driver.spawn(context, instance, image_meta, [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self._fetch_image_if_missing(context, vi) [ 685.036154] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] image_cache(vi, tmp_image_ds_loc) [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] vm_util.copy_virtual_disk( [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] session._wait_for_task(vmdk_copy_task) [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return self.wait_for_task(task_ref) [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return evt.wait() [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] result = hub.switch() [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.036512] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return self.greenlet.switch() [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self.f(*self.args, **self.kw) [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] raise exceptions.translate_fault(task_info.error) [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Faults: ['InvalidArgument'] [ 685.036850] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] [ 685.036850] env[62730]: INFO nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Terminating instance [ 685.038058] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.038242] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.038492] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2b4c3a9d-b4df-4261-8104-a9e311e3b8ce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.040892] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 685.040967] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 685.041709] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d401bb-1a59-45b0-bd93-dfd0f9b814cc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.048760] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 685.048986] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e0dfe7f-725a-4282-b919-e4c88ca24994 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.051405] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.051579] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 685.052529] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd398f3c-838f-44a3-b40e-ff6b9da97f54 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.057968] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for the task: (returnval){ [ 685.057968] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5287fa61-2790-0832-e6bb-2aefe135df14" [ 685.057968] env[62730]: _type = "Task" [ 685.057968] env[62730]: } to complete. 
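The MakeDirectory / "Folder [datastore2] devstack-image-cache_base created." exchange above is an idempotent create: the driver issues the call and treats "already exists" as success. A local-filesystem analogy of the same pattern (the datastore operation itself is a SOAP FileManager.MakeDirectory, not os.makedirs):

import errno
import os

def mkdir_idempotent(path):
    # Attempt the create and swallow only the "already exists" error, the
    # same way the driver tolerates a pre-existing folder on the datastore.
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise

# Equivalent local shorthand: os.makedirs(path, exist_ok=True)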
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.069520] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5287fa61-2790-0832-e6bb-2aefe135df14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.119157] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.119157] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.119157] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleting the datastore file [datastore2] ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.119348] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b08556a0-bbd5-436a-8de7-b1fb959d6764 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.126915] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 685.126915] env[62730]: value = "task-4837091" [ 685.126915] env[62730]: _type = "Task" [ 685.126915] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.134476] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': task-4837091, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.568668] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 685.568982] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Creating directory with path [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.569242] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27709480-ba0c-476b-94cc-540860d5d34d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.581760] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Created directory with path [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.581982] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Fetch image to [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 685.582172] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 685.583041] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed01075-2de4-4e98-b1c3-73db8bf6734a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.590174] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6279123e-4ab4-40f5-be7c-15ec8d578011 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.599455] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d266bc0c-3035-4691-8562-e00e69ed7265 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.634822] env[62730]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d4be49-d4ba-4c9b-a1cd-3a6088cf5b6d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.643107] env[62730]: DEBUG oslo_vmware.api [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': task-4837091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07124} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.644715] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.644973] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.645073] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.645234] env[62730]: INFO nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Took 0.60 seconds to destroy the instance on the hypervisor. 
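The fetch records above show the temp-path scheme the image cache uses: each fetch streams into its own vmware_temp/<random-uuid>/<image-id>/tmp-sparse.vmdk and is only later copied to the image's final .vmdk name, so concurrent fetches of the same image never write to the same file. A small sketch of that naming scheme (the helper name is hypothetical):

import posixpath
import uuid

def image_fetch_paths(image_id, cache_root="vmware_temp"):
    # Per-request temporary folder keyed by a fresh UUID, as in the log's
    # vmware_temp/b609c518-.../a46adab9-.../tmp-sparse.vmdk path.
    tmp_dir = posixpath.join(cache_root, str(uuid.uuid4()), image_id)
    sparse = posixpath.join(tmp_dir, "tmp-sparse.vmdk")
    final = posixpath.join(tmp_dir, image_id + ".vmdk")
    return sparse, final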
[ 685.647365] env[62730]: DEBUG nova.compute.claims [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 685.647540] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.647766] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.650475] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-38790ef8-2231-4f29-b6a4-9f73ffb467dd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.678269] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 685.744560] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 685.810686] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 685.810686] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
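The "Creating HTTP connection to write to file with size = 21318656" record above is oslo.vmware's write handle: a raw HTTP PUT of the image bytes to the datastore folder URL, after which the response is read (the RemoteDisconnected traceback earlier in this log was raised at exactly that getresponse() step, when the server closed the socket without replying). A minimal standard-library sketch of the pattern; the URL handling and TLS setup here are illustrative, not oslo.vmware's code:

import http.client
import ssl
from urllib.parse import urlsplit

def upload_to_datastore(url, fileobj, size, chunk=64 * 1024):
    parts = urlsplit(url)
    path = parts.path + ("?" + parts.query if parts.query else "")
    # Lab setups often use self-signed certs; verify properly in production.
    conn = http.client.HTTPSConnection(
        parts.hostname, parts.port or 443,
        context=ssl._create_unverified_context())
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(size))
    conn.endheaders()
    sent = 0
    while sent < size:
        data = fileobj.read(min(chunk, size - sent))
        if not data:
            break
        conn.send(data)
        sent += len(data)
    resp = conn.getresponse()  # http.client.RemoteDisconnected surfaces here
    resp.read()                # if the remote end hangs up without a status line
    conn.close()
    return resp.status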
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 686.225570] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aefe36e-519f-47f3-823d-1ad8cdb906e8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.233394] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2017504-766a-4152-9c47-83605ac3fa5d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.265730] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7bad3a-9568-47af-aace-e7df1bf9f11d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.272530] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097e2538-beed-46bf-b38f-d6dc8247e64a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.286260] env[62730]: DEBUG nova.compute.provider_tree [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.297972] env[62730]: DEBUG nova.scheduler.client.report [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 686.315493] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.668s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.316068] env[62730]: ERROR nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.316068] env[62730]: Faults: ['InvalidArgument'] [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Traceback (most recent call last): [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 686.316068] 
env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self.driver.spawn(context, instance, image_meta, [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self._fetch_image_if_missing(context, vi) [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] image_cache(vi, tmp_image_ds_loc) [ 686.316068] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] vm_util.copy_virtual_disk( [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] session._wait_for_task(vmdk_copy_task) [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return self.wait_for_task(task_ref) [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return evt.wait() [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] result = hub.switch() [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] return self.greenlet.switch() [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 686.316386] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] self.f(*self.args, **self.kw) [ 686.316681] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 686.316681] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] raise exceptions.translate_fault(task_info.error) [ 686.316681] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.316681] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Faults: ['InvalidArgument'] [ 686.316681] env[62730]: ERROR nova.compute.manager [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] [ 686.316801] env[62730]: DEBUG nova.compute.utils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.325897] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Build of instance ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce was re-scheduled: A specified parameter was not correct: fileType [ 686.325897] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 686.326339] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 686.326517] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 686.326690] env[62730]: DEBUG nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 686.326876] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.900915] env[62730]: DEBUG nova.network.neutron [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.919604] env[62730]: INFO nova.compute.manager [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce] Took 0.59 seconds to deallocate network for instance. [ 687.042017] env[62730]: INFO nova.scheduler.client.report [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleted allocations for instance ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce [ 687.067773] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbe923e1-9e5b-41cc-87db-c29869893de1 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "ac0c40c2-2577-4eae-b3af-96d2ceeeb4ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.167s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.078604] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Starting instance... 
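The lockutils records throughout this log ("acquired ... waited 0.000s", "released ... held 153.167s" above) come from a wrapper that times both the wait for the lock and the time it is held. oslo.concurrency implements this in lockutils; the plain-Python version below is an illustrative sketch of the same bookkeeping:

import threading
import time

_lock = threading.Lock()

def with_timed_lock(name, fn):
    t0 = time.monotonic()
    with _lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            return fn()
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" released :: held {held:.3f}s')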
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 687.140265] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.140265] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.140265] env[62730]: INFO nova.compute.claims [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.687662] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3b3a4d-e692-4c24-85d7-4f6c3597d2b8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.695572] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371948ae-2a3c-49c0-a82a-dd2cd2ddba92 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.726732] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5496c507-6d77-4c3f-8319-3bf30eb151c3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.735496] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f42228-b9dc-45be-bd1a-c30bcd37c81f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.750340] env[62730]: DEBUG nova.compute.provider_tree [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.759136] env[62730]: DEBUG nova.scheduler.client.report [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.775132] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.637s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.775673] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 687.816495] env[62730]: DEBUG nova.compute.utils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.818056] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 687.818238] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.827213] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 687.900174] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Start spawning the instance on the hypervisor. 
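The nova.virt.hardware records that follow enumerate CPU topologies for the flavor: every (sockets, cores, threads) triple whose product equals the vCPU count, subject to the 65536 limits. A small sketch of that enumeration; for the 1-vCPU m1.nano flavor in this log it yields exactly one topology, matching "Got 1 possible topologies" below:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate factorizations of the vCPU count into sockets*cores*threads.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

assert list(possible_topologies(1)) == [(1, 1, 1)]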
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 687.918910] env[62730]: DEBUG nova.policy [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83152a751fa94aa5bc99f32fabb7da8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '976763dbb98a4b04a9cda2b0a5482452', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.927524] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.927770] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.927931] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.928127] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.928280] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.928432] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.928642] env[62730]: DEBUG nova.virt.hardware [None 
req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.928880] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.929089] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.929263] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.929438] env[62730]: DEBUG nova.virt.hardware [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.930316] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cb897b-313a-4983-b0b0-82457e52e0cd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.939102] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7d49c7-8c03-4047-84d7-0d42e604ad86 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.564133] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Successfully created port: bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.879059] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.879394] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.995886] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Successfully updated port: bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 690.008313] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.008470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquired lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.008612] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 690.092488] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance cache missing network info. 
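Once the port is found, the Neutron network_info blob logged below is reduced to the handful of fields the VMware driver needs per VIF (the "Instance VIF info" record further on). A sketch of that reduction, assuming the network_info structure shown in this log; the output field names mirror the logged VIF info:

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    out = []
    for vif in network_info:
        details = vif.get("details", {})
        out.append({
            "iface_id": vif["id"],
            "mac_address": vif["address"],
            "vif_model": vif_model,  # from the image name ending in ';vmxnet3'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
        })
    return out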
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.492549] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Updating instance_info_cache with network_info: [{"id": "bb292e67-6e81-4189-b827-33f56347b317", "address": "fa:16:3e:0f:a5:78", "network": {"id": "0298efd8-524f-48ee-ac51-4d55b24ea8b6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-535478287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "976763dbb98a4b04a9cda2b0a5482452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb292e67-6e", "ovs_interfaceid": "bb292e67-6e81-4189-b827-33f56347b317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.507295] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Releasing lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.507681] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance network_info: |[{"id": "bb292e67-6e81-4189-b827-33f56347b317", "address": "fa:16:3e:0f:a5:78", "network": {"id": "0298efd8-524f-48ee-ac51-4d55b24ea8b6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-535478287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "976763dbb98a4b04a9cda2b0a5482452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb292e67-6e", "ovs_interfaceid": "bb292e67-6e81-4189-b827-33f56347b317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 690.508144] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:a5:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb292e67-6e81-4189-b827-33f56347b317', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.516170] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Creating folder: Project (976763dbb98a4b04a9cda2b0a5482452). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 690.517713] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-788a044c-ddd0-4d47-856c-dc325c8efad9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.522545] env[62730]: DEBUG nova.compute.manager [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Received event network-vif-plugged-bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 690.522782] env[62730]: DEBUG oslo_concurrency.lockutils [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] Acquiring lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.523010] env[62730]: DEBUG oslo_concurrency.lockutils [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] Lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.523190] env[62730]: DEBUG oslo_concurrency.lockutils [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] Lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.523363] env[62730]: DEBUG nova.compute.manager [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] No waiting events found dispatching network-vif-plugged-bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 690.523529] env[62730]: WARNING nova.compute.manager [req-aefaba18-edee-4b4c-be68-d2d651e724b4 req-77a61444-c00a-4ce7-ab91-99d32573bdcb service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Received unexpected event 
network-vif-plugged-bb292e67-6e81-4189-b827-33f56347b317 for instance with vm_state building and task_state spawning. [ 690.533878] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Created folder: Project (976763dbb98a4b04a9cda2b0a5482452) in parent group-v942928. [ 690.534253] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Creating folder: Instances. Parent ref: group-v942961. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 690.534450] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a1c38a1-e6e0-4ba3-9033-6fbf993747a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.544842] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Created folder: Instances in parent group-v942961. [ 690.546168] env[62730]: DEBUG oslo.service.loopingcall [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.546168] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 690.546168] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dd9c805-e1ca-44f9-890f-1b38e779540e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.568563] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.568563] env[62730]: value = "task-4837094" [ 690.568563] env[62730]: _type = "Task" [ 690.568563] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.577032] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837094, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.079999] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837094, 'name': CreateVM_Task, 'duration_secs': 0.35191} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.080339] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 691.081137] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.081459] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.081784] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 691.082049] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95ac3a13-833f-441e-aef0-372ea9c90fce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.087471] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for the task: (returnval){ [ 691.087471] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ad3905-10a3-0938-92a2-3604c8b48b4e" [ 691.087471] env[62730]: _type = "Task" [ 691.087471] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.097307] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ad3905-10a3-0938-92a2-3604c8b48b4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.597613] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.597882] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 691.598111] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.853982] env[62730]: DEBUG nova.compute.manager [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Received event network-changed-bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 692.854296] env[62730]: DEBUG nova.compute.manager [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Refreshing instance network info cache due to event network-changed-bb292e67-6e81-4189-b827-33f56347b317. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 692.854589] env[62730]: DEBUG oslo_concurrency.lockutils [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] Acquiring lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.854729] env[62730]: DEBUG oslo_concurrency.lockutils [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] Acquired lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.854950] env[62730]: DEBUG nova.network.neutron [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Refreshing network info cache for port bb292e67-6e81-4189-b827-33f56347b317 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 693.372361] env[62730]: DEBUG nova.network.neutron [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Updated VIF entry in instance network info cache for port bb292e67-6e81-4189-b827-33f56347b317. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 693.372807] env[62730]: DEBUG nova.network.neutron [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Updating instance_info_cache with network_info: [{"id": "bb292e67-6e81-4189-b827-33f56347b317", "address": "fa:16:3e:0f:a5:78", "network": {"id": "0298efd8-524f-48ee-ac51-4d55b24ea8b6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-535478287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "976763dbb98a4b04a9cda2b0a5482452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb292e67-6e", "ovs_interfaceid": "bb292e67-6e81-4189-b827-33f56347b317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.382363] env[62730]: DEBUG oslo_concurrency.lockutils [req-d0f92455-8aa3-44e2-86a3-288c71bd47a4 req-7efc8509-336b-4619-93c6-4e6cba15c3ae service nova] Releasing lock "refresh_cache-540af840-eba5-4cee-a37c-6d6809a24f95" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.756487] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.741021] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.741021] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.738974] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.739316] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 725.738072] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running 
periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 725.738072] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 725.738072] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 725.759155] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.759388] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.759492] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.759716] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.759777] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.759922] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.760017] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.760108] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.760230] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.760350] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 725.760472] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 726.736819] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.737072] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.737224] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 726.737381] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.749886] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.750118] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.750283] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.750441] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 726.751603] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9654486b-1ee9-48b5-8d21-7080ae3afe6d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.762540] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f42ca0d6-6511-4d21-8961-ecc639094dc3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.777607] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef87a52c-5480-47a3-b093-a65e10bda696 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.784839] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a421b0-4e43-4078-8828-be7d3e997b5c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.816197] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180519MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 726.816389] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.816550] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.901033] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 736075f4-302b-4b1a-9358-7fe2fb73a36f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901134] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901265] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901388] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16f7dfdb-2063-4992-9f40-4b332006940f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901510] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901632] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901752] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901868] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.901981] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.902106] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 726.915023] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.926227] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c62428f6-0693-4ae7-81ae-eacb56821c3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.938945] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2074f279-f5f2-4048-abf5-ee61bd9f5002 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.949613] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6eab5473-6c72-4bdb-8f84-56de17441f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.959595] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1ac41735-b0b9-428e-8644-13490403d53e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.969447] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.979420] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 726.991945] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a5014e6-835c-45fd-b723-a968782dda58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.002961] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a897e28b-32bc-4726-ac37-c99dc2efb75d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.015646] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2f233d8c-7e64-433e-82aa-ca4b1b2a1798 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.027105] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.038934] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1658258-9147-431c-9e6d-5f8360523c23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.049689] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9d39c196-4ab4-4a97-9c82-44b4a4b107a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.060760] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b7273f62-c330-4b6a-a6e3-39d76c46aac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.072412] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1dc56d8a-02e3-4441-9bb5-f091ecac835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.083408] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8656d64-907d-4524-905a-aa67a4ad1f63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.094424] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6d8cad34-699c-4dcc-8f83-e21490f82b8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.104966] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cb5b5e10-41e5-497e-b409-0a83fa00896c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.115894] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91907341-29ff-42b3-a25f-a9e990af4de1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.126292] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.137922] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4cefd92c-8058-4e3e-a175-4807a84e0b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.147954] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 45bb8da2-c544-4935-a3a8-62305e599c06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.157425] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0e7106c5-fca6-4d97-a6dd-f0670ca42202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.167830] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3d3b40de-1123-44ed-b241-746731c3097c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.178546] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.189318] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 727.189569] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 727.189746] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_42256023b89344de90ced8c51fd48cf6': '1', 'io_workload': '10', 'num_proj_b825311d36404f199e86101b21b30ad5': '1', 'num_proj_76b57f69c45049f4b76e1ea4c1f78513': '1', 'num_proj_cf705f506bcc4409881416d80a745afc': '2', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 727.615736] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815de1c9-72c7-41f3-8638-a7f563e8d523 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.624187] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28201ad7-40ea-48fb-905c-22dca8b77f6b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.655730] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c4a541-8c98-4fa9-9498-6a4f1fbd9431 {{(pid=62730) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.663856] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0def49-4f85-46e3-a434-b514f522fd7a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.677551] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.686955] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.701575] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 727.701854] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.885s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.697816] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.883803] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.555021] env[62730]: WARNING oslo_vmware.rw_handles [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, 
in getresponse [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 734.555021] env[62730]: ERROR oslo_vmware.rw_handles [ 734.555612] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 734.557192] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 734.557443] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Copying Virtual Disk [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/b609c518-5353-4f20-b26a-e39a0afe3eea/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 734.557749] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a56ee9c-93de-42a7-9c49-bce2e9955d0a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.565781] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for the task: (returnval){ [ 734.565781] env[62730]: value = "task-4837095" [ 734.565781] env[62730]: _type = "Task" [ 734.565781] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.574209] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Task: {'id': task-4837095, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.079048] env[62730]: DEBUG oslo_vmware.exceptions [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 735.079048] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.079048] env[62730]: ERROR nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 735.079048] env[62730]: Faults: ['InvalidArgument'] [ 735.079048] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Traceback (most recent call last): [ 735.079048] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 735.079048] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] yield resources [ 735.079048] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self.driver.spawn(context, instance, image_meta, [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self._fetch_image_if_missing(context, vi) [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] image_cache(vi, tmp_image_ds_loc) [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] vm_util.copy_virtual_disk( [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 
736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] session._wait_for_task(vmdk_copy_task) [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 735.079471] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return self.wait_for_task(task_ref) [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return evt.wait() [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] result = hub.switch() [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return self.greenlet.switch() [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self.f(*self.args, **self.kw) [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] raise exceptions.translate_fault(task_info.error) [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Faults: ['InvalidArgument'] [ 735.079802] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] [ 735.080148] env[62730]: INFO nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Terminating instance [ 735.081682] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.081939] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 
tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.082715] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 735.082969] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 735.083242] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3099c981-8e76-4e52-8958-5e13b41b12f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.085741] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fae670-f168-4aba-abaf-d8b4aac67309 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.093427] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 735.093711] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b66e79b4-be73-4356-aabc-ce228052b887 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.096149] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.096323] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 735.097360] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e92cce1-df30-43e4-8d97-649b4550b111 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.103039] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Waiting for the task: (returnval){ [ 735.103039] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52047e81-2f3b-a407-e2d5-c731ab7e0b88" [ 735.103039] env[62730]: _type = "Task" [ 735.103039] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.112344] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52047e81-2f3b-a407-e2d5-c731ab7e0b88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.169886] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 735.170098] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 735.170421] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Deleting the datastore file [datastore2] 736075f4-302b-4b1a-9358-7fe2fb73a36f {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.170657] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa5ed386-b7b4-4655-a29f-dde852fd1bc6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.177916] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for the task: (returnval){ [ 735.177916] env[62730]: value = "task-4837097" [ 735.177916] env[62730]: _type = "Task" [ 735.177916] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.187327] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Task: {'id': task-4837097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.616183] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 735.616482] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Creating directory with path [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.616691] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5fddc49-2bed-43cd-ba2d-570fee4bd99e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.629532] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Created directory with path [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.629732] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Fetch image to [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 735.629907] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 735.630690] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcdad94-b38c-4f69-afc1-6869e21296fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.638072] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e45ce2-8765-49b6-bf5f-7bb100ad6c75 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.647388] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef0489d-c1e7-4078-a320-5d7bad0b36fa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.678416] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200a4475-bbcc-4dcd-abbd-b5fc0de1231e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.689123] env[62730]: DEBUG oslo_vmware.api [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Task: {'id': task-4837097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088134} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.690740] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.690937] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 735.691131] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 735.691313] env[62730]: INFO nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 735.693161] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bf7cde38-d607-4bbb-9a0c-ef503dede682 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.695892] env[62730]: DEBUG nova.compute.claims [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 735.696082] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.696299] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.718492] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 735.781480] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 735.842297] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 735.842487] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 736.278695] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182bc17f-f4e7-49be-8138-b062190bf6fd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.287381] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09c786d-1cb0-4a3f-8fe8-017e28e53370 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.325824] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f532e2d0-6723-4ad6-8c46-ac2d8edbb1ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.333522] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04cb11d-e4a4-4944-9733-163015a16293 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.348322] env[62730]: DEBUG nova.compute.provider_tree [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.358474] env[62730]: DEBUG nova.scheduler.client.report [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.377043] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.680s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.378109] env[62730]: ERROR nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.378109] env[62730]: Faults: ['InvalidArgument'] [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Traceback (most recent call last): [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self.driver.spawn(context, instance, image_meta, [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self._fetch_image_if_missing(context, vi) [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] image_cache(vi, tmp_image_ds_loc) [ 736.378109] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] vm_util.copy_virtual_disk( [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] session._wait_for_task(vmdk_copy_task) [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return self.wait_for_task(task_ref) [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return evt.wait() [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] result = hub.switch() [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] return self.greenlet.switch() [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 736.378418] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] self.f(*self.args, **self.kw) [ 736.378718] env[62730]: ERROR 
nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 736.378718] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] raise exceptions.translate_fault(task_info.error) [ 736.378718] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.378718] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Faults: ['InvalidArgument'] [ 736.378718] env[62730]: ERROR nova.compute.manager [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] [ 736.378913] env[62730]: DEBUG nova.compute.utils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.382457] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Build of instance 736075f4-302b-4b1a-9358-7fe2fb73a36f was re-scheduled: A specified parameter was not correct: fileType [ 736.382457] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 736.382457] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 736.382457] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 736.382457] env[62730]: DEBUG nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 736.382920] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 737.166375] env[62730]: DEBUG nova.network.neutron [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.178743] env[62730]: INFO nova.compute.manager [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Took 0.80 seconds to deallocate network for instance. [ 737.288082] env[62730]: INFO nova.scheduler.client.report [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Deleted allocations for instance 736075f4-302b-4b1a-9358-7fe2fb73a36f [ 737.312807] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f80c51c-f18e-48d9-b4d1-8fae294c38a2 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.574s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.314371] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.429s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.314371] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Acquiring lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.314371] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 
tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.314703] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.317542] env[62730]: INFO nova.compute.manager [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Terminating instance [ 737.322809] env[62730]: DEBUG nova.compute.manager [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 737.323323] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.323637] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db4f2171-7ee9-4eda-bc13-95feb9a208bf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.336173] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cc9ec6-d12e-4ec0-9b62-57434fc66056 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.358676] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 737.373595] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 736075f4-302b-4b1a-9358-7fe2fb73a36f could not be found. 
[ 737.373835] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 737.374265] env[62730]: INFO nova.compute.manager [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 737.374340] env[62730]: DEBUG oslo.service.loopingcall [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.374878] env[62730]: DEBUG nova.compute.manager [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 737.374968] env[62730]: DEBUG nova.network.neutron [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 737.420475] env[62730]: DEBUG nova.network.neutron [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.432458] env[62730]: INFO nova.compute.manager [-] [instance: 736075f4-302b-4b1a-9358-7fe2fb73a36f] Took 0.06 seconds to deallocate network for instance. 
[ 737.441408] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.441709] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.443203] env[62730]: INFO nova.compute.claims [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.580798] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5f2e75d2-8bdd-4892-b0dc-d1974c746e06 tempest-FloatingIPsAssociationNegativeTestJSON-32558767 tempest-FloatingIPsAssociationNegativeTestJSON-32558767-project-member] Lock "736075f4-302b-4b1a-9358-7fe2fb73a36f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.268s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.906896] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.998301] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcac8bd-0c24-489f-9139-0396033dd299 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.006751] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a3b06d-509a-4926-a798-e6755eab3056 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.042120] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be114941-df9f-47bc-bd5b-125e24417df6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.050491] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f3600b-ca58-4620-9404-9a7a3e99f70b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.064848] env[62730]: DEBUG nova.compute.provider_tree [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.073188] env[62730]: DEBUG nova.scheduler.client.report [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.087802] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.646s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.088356] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 738.124185] env[62730]: DEBUG nova.compute.utils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.125896] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 738.126082] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 738.140033] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 738.207926] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 738.235818] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.236210] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.236505] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.236845] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.237126] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.237386] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.237665] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.237841] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 738.238028] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.238201] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.238378] env[62730]: DEBUG nova.virt.hardware [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.239276] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e3380c-13ba-47fa-9fda-ceb982e9af7d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.250138] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f21d2d-c46c-42e3-a3d7-5cd14c6cd826 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.270767] env[62730]: DEBUG nova.policy [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '61e8257dde7e4492a61c0b3a2979c39f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9a9c0281e6f463aab4a2f5fcb1019a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 738.574330] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "318f7880-c500-40b8-9ca1-d8a857b36a88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.980295] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Successfully created port: dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.287679] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Successfully updated port: 
dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 740.308451] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.308612] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquired lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.308768] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 740.614795] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.661863] env[62730]: DEBUG nova.compute.manager [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Received event network-vif-plugged-dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 740.662172] env[62730]: DEBUG oslo_concurrency.lockutils [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] Acquiring lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.662465] env[62730]: DEBUG oslo_concurrency.lockutils [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.662728] env[62730]: DEBUG oslo_concurrency.lockutils [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.662947] env[62730]: DEBUG nova.compute.manager [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] No waiting events found dispatching network-vif-plugged-dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 740.664170] env[62730]: WARNING nova.compute.manager [req-84c8f30a-f200-40f3-ad23-26676ead7ebe req-6eba6124-1145-4919-864c-3a08940615cb service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Received unexpected event network-vif-plugged-dbbd465f-6c60-4627-b7cc-c6bf9d91acee for instance with vm_state building and task_state spawning. [ 740.964590] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Updating instance_info_cache with network_info: [{"id": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "address": "fa:16:3e:da:07:a2", "network": {"id": "602e1b02-72b1-49ab-9335-eeca74485ea3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-768378263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9a9c0281e6f463aab4a2f5fcb1019a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbd465f-6c", "ovs_interfaceid": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.978489] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Releasing lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.981699] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance network_info: |[{"id": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "address": "fa:16:3e:da:07:a2", "network": {"id": "602e1b02-72b1-49ab-9335-eeca74485ea3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-768378263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9a9c0281e6f463aab4a2f5fcb1019a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 
618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbd465f-6c", "ovs_interfaceid": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 740.982164] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:07:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbbd465f-6c60-4627-b7cc-c6bf9d91acee', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.986802] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Creating folder: Project (b9a9c0281e6f463aab4a2f5fcb1019a1). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.987232] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12596ace-5da0-4e26-9e98-75de2281dcea {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.998862] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Created folder: Project (b9a9c0281e6f463aab4a2f5fcb1019a1) in parent group-v942928. [ 740.998960] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Creating folder: Instances. Parent ref: group-v942964. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.999882] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b894d2b-c4ef-46da-a50d-656794e8a8a1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.008822] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Created folder: Instances in parent group-v942964. [ 741.009096] env[62730]: DEBUG oslo.service.loopingcall [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.009291] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 741.009497] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11ddcd98-d3f8-42b8-bbab-06c9bf0f2b39 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.031898] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.031898] env[62730]: value = "task-4837100" [ 741.031898] env[62730]: _type = "Task" [ 741.031898] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.040391] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837100, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.542524] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837100, 'name': CreateVM_Task, 'duration_secs': 0.385206} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.542809] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.543399] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.543567] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.543898] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.544182] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19032c40-96eb-483d-beb1-3c71eb2e0ca5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.549700] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for the task: (returnval){ [ 741.549700] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ef0d33-ae8b-9a63-8d50-297d7d2ffb95" [ 741.549700] env[62730]: _type = "Task" [ 741.549700] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.559378] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ef0d33-ae8b-9a63-8d50-297d7d2ffb95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.061820] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.062278] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.062674] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.867622] env[62730]: DEBUG nova.compute.manager [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Received event network-changed-dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 742.867622] env[62730]: DEBUG nova.compute.manager [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Refreshing instance network info cache due to event network-changed-dbbd465f-6c60-4627-b7cc-c6bf9d91acee. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 742.867622] env[62730]: DEBUG oslo_concurrency.lockutils [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] Acquiring lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.867622] env[62730]: DEBUG oslo_concurrency.lockutils [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] Acquired lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.867622] env[62730]: DEBUG nova.network.neutron [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Refreshing network info cache for port dbbd465f-6c60-4627-b7cc-c6bf9d91acee {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.357146] env[62730]: DEBUG nova.network.neutron [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Updated VIF entry in instance network info cache for port dbbd465f-6c60-4627-b7cc-c6bf9d91acee. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 743.357146] env[62730]: DEBUG nova.network.neutron [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Updating instance_info_cache with network_info: [{"id": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "address": "fa:16:3e:da:07:a2", "network": {"id": "602e1b02-72b1-49ab-9335-eeca74485ea3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-768378263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9a9c0281e6f463aab4a2f5fcb1019a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbd465f-6c", "ovs_interfaceid": "dbbd465f-6c60-4627-b7cc-c6bf9d91acee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.372640] env[62730]: DEBUG oslo_concurrency.lockutils [req-0b3e1ac7-ce2b-4533-b8de-2e84739ba340 req-86ad1b2b-6f0d-46f3-b64d-e446591d8bbd service nova] Releasing lock "refresh_cache-986e37d4-d3ae-42a0-8caa-39b92636b973" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.574028] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring 
lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.574307] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.604788] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "8504a95d-6003-4698-a3b5-4913eb59c932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.605069] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "8504a95d-6003-4698-a3b5-4913eb59c932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.954968] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.078399] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.843295] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.061227] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.179220] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "540af840-eba5-4cee-a37c-6d6809a24f95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.020595] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "986e37d4-d3ae-42a0-8caa-39b92636b973" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.347443] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45b3bb65-63ea-478a-b5b7-9b7a570f82ca tempest-ServersListShow296Test-183377682 tempest-ServersListShow296Test-183377682-project-member] Acquiring lock "13fb51cc-7cfc-44f9-9a15-381762007fe7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.347443] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45b3bb65-63ea-478a-b5b7-9b7a570f82ca tempest-ServersListShow296Test-183377682 tempest-ServersListShow296Test-183377682-project-member] Lock "13fb51cc-7cfc-44f9-9a15-381762007fe7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.737750] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.738061] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 782.758796] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 0 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 782.759081] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.759238] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 782.771074] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 783.778411] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 784.569450] env[62730]: WARNING oslo_vmware.rw_handles [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 784.569450] env[62730]: ERROR oslo_vmware.rw_handles [ 784.570294] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 784.572845] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 784.572845] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Copying Virtual Disk [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/64f7d870-efe4-415c-8231-68465b773e1a/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 784.572845] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a82bfc3-bbe2-486c-a021-1b58ede75c8f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.583450] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 
tempest-MigrationsAdminTest-1545434615-project-member] Waiting for the task: (returnval){ [ 784.583450] env[62730]: value = "task-4837101" [ 784.583450] env[62730]: _type = "Task" [ 784.583450] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.597181] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Task: {'id': task-4837101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.738060] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 785.093479] env[62730]: DEBUG oslo_vmware.exceptions [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 785.093915] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.094645] env[62730]: ERROR nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 785.094645] env[62730]: Faults: ['InvalidArgument'] [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Traceback (most recent call last): [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] yield resources [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self.driver.spawn(context, instance, image_meta, [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 
0a718440-a0f8-4614-a9f3-553b2ff2e156] self._fetch_image_if_missing(context, vi) [ 785.094645] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] image_cache(vi, tmp_image_ds_loc) [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] vm_util.copy_virtual_disk( [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] session._wait_for_task(vmdk_copy_task) [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return self.wait_for_task(task_ref) [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return evt.wait() [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] result = hub.switch() [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 785.095082] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return self.greenlet.switch() [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self.f(*self.args, **self.kw) [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] raise exceptions.translate_fault(task_info.error) [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Faults: ['InvalidArgument'] [ 785.095483] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] [ 785.095483] env[62730]: INFO nova.compute.manager [None 
req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Terminating instance [ 785.096771] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.097054] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.097798] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 785.098123] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 785.098326] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-321f8227-313c-497f-8bf3-04b2b78d0390 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.102461] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558251b9-1c35-414c-845e-642f3374f18f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.111843] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 785.112138] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ea00c9b-8932-4598-82d4-62aee812e57e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.115243] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.115530] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 785.117024] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-292685ed-73f6-4ab7-b806-da72523521ac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.123763] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for the task: (returnval){ [ 785.123763] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5275fab8-9aa3-a1ca-6434-fd13d34c2492" [ 785.123763] env[62730]: _type = "Task" [ 785.123763] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.133947] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5275fab8-9aa3-a1ca-6434-fd13d34c2492, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.193844] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 785.194202] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 785.194474] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Deleting the datastore file [datastore2] 0a718440-a0f8-4614-a9f3-553b2ff2e156 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 785.194782] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d198590-65ff-476b-bf68-b12fe2c811e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.206280] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Waiting for the task: (returnval){ [ 785.206280] env[62730]: value = "task-4837103" [ 785.206280] env[62730]: _type = "Task" [ 785.206280] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.213918] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Task: {'id': task-4837103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.573578] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2dbacb2-8582-48c6-a71e-4c0fdd5d7ff2 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "cced1efc-f73f-43a5-8a13-de65ef5703b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.573578] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2dbacb2-8582-48c6-a71e-4c0fdd5d7ff2 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "cced1efc-f73f-43a5-8a13-de65ef5703b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.635798] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 785.636078] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Creating directory with path [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.636327] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1b0fc45-7a6c-490e-a14c-496626df8b98 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.648483] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Created directory with path [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.648689] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Fetch image to [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 785.648861] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 785.649763] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe5830f-bd7c-4a66-b5cb-6976af253dce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.660234] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bba49c-eebf-4f8b-8803-9935a8d37a0a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.676181] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79286155-3fbb-4d00-adec-e19834688cce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.707827] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd930e6-f786-4e4d-953e-f15e05339b96 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.718364] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9bcc2430-e47b-4074-9c67-223a556b7e83 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.720105] env[62730]: DEBUG oslo_vmware.api [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Task: {'id': task-4837103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081447} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.720363] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.720549] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 785.720714] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 785.720887] env[62730]: INFO nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Took 0.62 seconds to destroy the instance on the hypervisor. 
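The wait_for_task/_poll_task pairs running through this section (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task above) all follow the same shape: submit a vCenter task, then poll its state until it reports success or a fault, logging "progress is N%" on each pass and "completed successfully" with a duration at the end. Below is a minimal sketch of that loop; get_task_info() is a hypothetical stand-in for the vSphere property read, not an oslo.vmware function, and the exception type is mine.

```python
# A minimal sketch of the poll loop behind the "Waiting for the task ...
# progress is 0% ... completed successfully" sequences in this log.
# get_task_info() is a hypothetical callable returning the task's state
# and progress; this is NOT the oslo.vmware implementation.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vCenter task reference until it succeeds or errors out."""
    while True:
        info = get_task_info(task_ref)          # e.g. {'state': 'running', 'progress': 0}
        if info["state"] == "success":
            return info                          # log: "completed successfully"
        if info["state"] == "error":
            raise TaskFailed(info.get("error"))  # log: fault translated and raised
        # log: "Task: {...} progress is 0%."
        time.sleep(poll_interval)
```

On error the real code translates the vSphere fault before raising, which is what produces the VimFaultException/InvalidArgument tracebacks seen later in this run when CopyVirtualDisk_Task rejects the fileType parameter.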
[ 785.722979] env[62730]: DEBUG nova.compute.claims [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 785.723185] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.725017] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.737977] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 785.745285] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 785.817791] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 785.881322] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 785.881588] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 786.507303] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a48e70-377b-40d7-9422-2505d8f6d606 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.518058] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4691b161-af04-4363-b078-e2c5d5b0c52a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.551916] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca4a552-89c7-4f6c-bcde-aee6244825b9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.560021] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91a00d6-1263-44af-a7f9-4df420580db7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.574367] env[62730]: DEBUG nova.compute.provider_tree [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.588863] env[62730]: DEBUG nova.scheduler.client.report [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.612060] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.888s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.613456] env[62730]: ERROR nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 786.613456] env[62730]: Faults: ['InvalidArgument'] [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Traceback (most recent call last): [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 
0a718440-a0f8-4614-a9f3-553b2ff2e156] self.driver.spawn(context, instance, image_meta, [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self._fetch_image_if_missing(context, vi) [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] image_cache(vi, tmp_image_ds_loc) [ 786.613456] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] vm_util.copy_virtual_disk( [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] session._wait_for_task(vmdk_copy_task) [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return self.wait_for_task(task_ref) [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return evt.wait() [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] result = hub.switch() [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] return self.greenlet.switch() [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 786.613899] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] self.f(*self.args, **self.kw) [ 786.614385] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 786.614385] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] raise exceptions.translate_fault(task_info.error) [ 786.614385] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 786.614385] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Faults: ['InvalidArgument'] [ 786.614385] env[62730]: ERROR nova.compute.manager [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] [ 786.614385] env[62730]: DEBUG nova.compute.utils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.615486] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Build of instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 was re-scheduled: A specified parameter was not correct: fileType [ 786.615486] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 786.615875] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 786.616058] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 786.616214] env[62730]: DEBUG nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 786.616375] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 786.737803] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.737803] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.427743] env[62730]: DEBUG nova.network.neutron [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.445366] env[62730]: INFO nova.compute.manager [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Took 0.83 seconds to deallocate network for instance. 
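Much of the remaining volume in this run is oslo_concurrency.lockutils reporting the acquire/release of a named lock, together with how long the caller waited for it and how long it was held (the build lock released just below reports held 250.205s after another request waited 49.748s on the same name). A rough stdlib-only sketch of that bookkeeping follows; it mimics the messages cited at lockutils.py:402/407/421 rather than reproducing the oslo.concurrency implementation.

```python
# A stdlib-only sketch of the "Acquiring lock ... acquired ... waited Ns
# ... released ... held Ns" bookkeeping that lockutils emits around each
# critical section. Illustrative only; not the oslo.concurrency code.
import contextlib
import threading
import time

_locks = {}


@contextlib.contextmanager
def logged_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')
```

The waited/held split is what makes these entries useful when reading a run like this: a long "waited" on a lock name means some other request held the same name for that long, as with the compute_resources and instance-UUID locks above.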
[ 787.632048] env[62730]: INFO nova.scheduler.client.report [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Deleted allocations for instance 0a718440-a0f8-4614-a9f3-553b2ff2e156
[ 787.655109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1d9dfc29-7e3f-4718-8c84-01b56940d4c9 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 250.205s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.655109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 49.748s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 787.655109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 787.655295] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 787.655677] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.657980] env[62730]: INFO nova.compute.manager [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Terminating instance
[ 787.660057] env[62730]: DEBUG nova.compute.manager [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 787.660375] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 787.661569] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c281abbc-e248-47c2-a33b-4cdf26684721 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 787.672350] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf76bed-695b-4200-ab2e-c7a2c43260d8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 787.692786] env[62730]: DEBUG nova.compute.manager [None req-b69749c6-e6f5-42c8-bcc5-415dd681a6f6 tempest-ServersTestFqdnHostnames-351985442 tempest-ServersTestFqdnHostnames-351985442-project-member] [instance: c62428f6-0693-4ae7-81ae-eacb56821c3b] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 787.711641] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a718440-a0f8-4614-a9f3-553b2ff2e156 could not be found.
[ 787.711863] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 787.712636] env[62730]: INFO nova.compute.manager [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 787.714277] env[62730]: DEBUG oslo.service.loopingcall [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 787.714612] env[62730]: DEBUG nova.compute.manager [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 787.714728] env[62730]: DEBUG nova.network.neutron [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 787.729418] env[62730]: DEBUG nova.compute.manager [None req-b69749c6-e6f5-42c8-bcc5-415dd681a6f6 tempest-ServersTestFqdnHostnames-351985442 tempest-ServersTestFqdnHostnames-351985442-project-member] [instance: c62428f6-0693-4ae7-81ae-eacb56821c3b] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 787.737770] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 787.737949] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 787.738084] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 787.755514] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b69749c6-e6f5-42c8-bcc5-415dd681a6f6 tempest-ServersTestFqdnHostnames-351985442 tempest-ServersTestFqdnHostnames-351985442-project-member] Lock "c62428f6-0693-4ae7-81ae-eacb56821c3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 216.480s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.760868] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761184] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761345] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761469] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761589] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761705] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761817] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.761930] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.762051] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 787.762166] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}}
[ 787.762388] env[62730]: DEBUG nova.network.neutron [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 787.771450] env[62730]: DEBUG nova.compute.manager [None req-b57f68fa-2c10-4722-9550-49da67838196 tempest-ServersAdmin275Test-133488872 tempest-ServersAdmin275Test-133488872-project-member] [instance: 2074f279-f5f2-4048-abf5-ee61bd9f5002] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 787.775810] env[62730]: INFO nova.compute.manager [-] [instance: 0a718440-a0f8-4614-a9f3-553b2ff2e156] Took 0.06 seconds to deallocate network for instance.
[ 787.831289] env[62730]: DEBUG nova.compute.manager [None req-b57f68fa-2c10-4722-9550-49da67838196 tempest-ServersAdmin275Test-133488872 tempest-ServersAdmin275Test-133488872-project-member] [instance: 2074f279-f5f2-4048-abf5-ee61bd9f5002] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 787.862608] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b57f68fa-2c10-4722-9550-49da67838196 tempest-ServersAdmin275Test-133488872 tempest-ServersAdmin275Test-133488872-project-member] Lock "2074f279-f5f2-4048-abf5-ee61bd9f5002" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.245s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.877243] env[62730]: DEBUG nova.compute.manager [None req-2d566888-1b41-4987-845e-df54b014bf72 tempest-VolumesAdminNegativeTest-322510555 tempest-VolumesAdminNegativeTest-322510555-project-member] [instance: 6eab5473-6c72-4bdb-8f84-56de17441f3a] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 787.915717] env[62730]: DEBUG nova.compute.manager [None req-2d566888-1b41-4987-845e-df54b014bf72 tempest-VolumesAdminNegativeTest-322510555 tempest-VolumesAdminNegativeTest-322510555-project-member] [instance: 6eab5473-6c72-4bdb-8f84-56de17441f3a] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 787.941403] env[62730]: DEBUG oslo_concurrency.lockutils [None req-29d1c448-bec8-4264-aa0a-c04c2ff815e4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "0a718440-a0f8-4614-a9f3-553b2ff2e156" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.287s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.943608] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d566888-1b41-4987-845e-df54b014bf72 tempest-VolumesAdminNegativeTest-322510555 tempest-VolumesAdminNegativeTest-322510555-project-member] Lock "6eab5473-6c72-4bdb-8f84-56de17441f3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.455s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 787.955317] env[62730]: DEBUG nova.compute.manager [None req-95b2345a-8f6d-422e-90bc-ad1d343a8754 tempest-AttachInterfacesV270Test-1552484991 tempest-AttachInterfacesV270Test-1552484991-project-member] [instance: 1ac41735-b0b9-428e-8644-13490403d53e] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 788.019858] env[62730]: DEBUG nova.compute.manager [None req-95b2345a-8f6d-422e-90bc-ad1d343a8754 tempest-AttachInterfacesV270Test-1552484991 tempest-AttachInterfacesV270Test-1552484991-project-member] [instance: 1ac41735-b0b9-428e-8644-13490403d53e] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 788.053638] env[62730]: DEBUG oslo_concurrency.lockutils [None req-95b2345a-8f6d-422e-90bc-ad1d343a8754 tempest-AttachInterfacesV270Test-1552484991 tempest-AttachInterfacesV270Test-1552484991-project-member] Lock "1ac41735-b0b9-428e-8644-13490403d53e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.934s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 788.066091] env[62730]: DEBUG nova.compute.manager [None req-5fff3c03-bb4b-450d-8403-c4f4acd0c71c tempest-ServerActionsTestJSON-1709088567 tempest-ServerActionsTestJSON-1709088567-project-member] [instance: fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 788.094430] env[62730]: DEBUG nova.compute.manager [None req-5fff3c03-bb4b-450d-8403-c4f4acd0c71c tempest-ServerActionsTestJSON-1709088567 tempest-ServerActionsTestJSON-1709088567-project-member] [instance: fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 788.121482] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fff3c03-bb4b-450d-8403-c4f4acd0c71c tempest-ServerActionsTestJSON-1709088567 tempest-ServerActionsTestJSON-1709088567-project-member] Lock "fbfc5a14-9a1f-4d76-a1a4-8afc5833eaba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.146s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 788.131577] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 788.195416] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 788.195588] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 788.197176] env[62730]: INFO nova.compute.claims [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 788.283021] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 788.300011] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 788.300325] env[62730]: DEBUG nova.compute.provider_tree [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 788.322583] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 788.344421] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Refreshing trait associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 788.406373] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 788.737665] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 788.737665] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 788.737665] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}}
[ 788.737665] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 788.753321] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 788.875659] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d38e29-9fc7-44fd-b373-18a0eb3e501b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 788.883646] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01806ddd-b349-4719-9030-38aee4dce937 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 788.924428] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b96d22-2f12-4580-ac70-49de7796a3dd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 788.933234] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46b2145-0d45-4681-bee5-c5b797693f42 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 788.949553] env[62730]: DEBUG nova.compute.provider_tree [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 788.961218] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 788.984748] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.789s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 788.985261] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 788.987876] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.235s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 788.989320] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 788.989526] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 788.993747] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30697860-1907-4039-a80c-5d85d08975ef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.005375] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c896b7f-4a99-42e9-97bb-292834de89b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.022878] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a35525-3889-4780-9344-2a335fa0dfc3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.031977] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f73a9b7-fa2a-4bcd-bf01-b85fd899dd6d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.038786] env[62730]: DEBUG nova.compute.claims [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 789.038970] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 789.039204] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 789.068888] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180536MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 789.068888] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 789.594926] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd8468-6e13-48aa-9a02-76fdddfa28a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.603467] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5190c0-09ac-4de3-8558-b59d7449b419 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.644554] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e7f6d6-63fe-4040-b2cf-a7e1962c1331 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.653601] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500fb6fc-4deb-4b83-a68f-bb8e223e1db5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 789.669061] env[62730]: DEBUG nova.compute.provider_tree [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 789.683391] env[62730]: DEBUG nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 789.702053] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.663s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 789.702945] env[62730]: DEBUG nova.compute.utils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Conflict updating instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 789.704260] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.636s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 789.709027] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance disappeared during build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}}
[ 789.709027] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 789.709027] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 789.709027] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquired lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 789.709331] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 789.766107] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 789.810447] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.810663] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16f7dfdb-2063-4992-9f40-4b332006940f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812037] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812037] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812037] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812037] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812342] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812342] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.812342] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 789.824679] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.839317] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.851739] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1658258-9147-431c-9e6d-5f8360523c23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.870760] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9d39c196-4ab4-4a97-9c82-44b4a4b107a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.882298] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b7273f62-c330-4b6a-a6e3-39d76c46aac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.893711] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1dc56d8a-02e3-4441-9bb5-f091ecac835a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.907185] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8656d64-907d-4524-905a-aa67a4ad1f63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.919330] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6d8cad34-699c-4dcc-8f83-e21490f82b8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.933258] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cb5b5e10-41e5-497e-b409-0a83fa00896c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.950286] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91907341-29ff-42b3-a25f-a9e990af4de1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.964280] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.976864] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4cefd92c-8058-4e3e-a175-4807a84e0b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 789.990189] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 45bb8da2-c544-4935-a3a8-62305e599c06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.008182] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0e7106c5-fca6-4d97-a6dd-f0670ca42202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.022630] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3d3b40de-1123-44ed-b241-746731c3097c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.033793] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.052523] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.071724] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.086238] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "01a34662-fef9-4855-ba3c-39184982fd0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 790.086487] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 790.099132] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8504a95d-6003-4698-a3b5-4913eb59c932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.117153] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 13fb51cc-7cfc-44f9-9a15-381762007fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.129155] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cced1efc-f73f-43a5-8a13-de65ef5703b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 790.129416] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 790.129584] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '10', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '7', 'num_os_type_None': '9', 'num_proj_76b57f69c45049f4b76e1ea4c1f78513': '1', 'io_workload': '9', 'num_task_spawning': '2', 'num_proj_cf705f506bcc4409881416d80a745afc': '2', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 790.334190] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 790.352030] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Releasing lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 790.352030] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 790.352030] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 790.352030] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 790.412404] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 790.427732] env[62730]: DEBUG nova.network.neutron [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 790.454402] env[62730]: INFO nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Took 0.10 seconds to deallocate network for instance.
[ 790.590553] env[62730]: INFO nova.scheduler.client.report [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Deleted allocations for instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d [ 790.590860] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.529s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.592112] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 2.186s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.594018] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.594018] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.594018] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.594018] env[62730]: DEBUG nova.objects.instance [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lazy-loading 'flavor' on Instance uuid f4408a1f-d3f2-4e1e-ba96-cd509166e31d {{(pid=62730) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.605758] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: 2a5014e6-835c-45fd-b723-a968782dda58] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 790.634611] env[62730]: DEBUG nova.objects.instance [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lazy-loading 'metadata' on Instance uuid f4408a1f-d3f2-4e1e-ba96-cd509166e31d {{(pid=62730) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.641368] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: 2a5014e6-835c-45fd-b723-a968782dda58] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 790.667387] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "2a5014e6-835c-45fd-b723-a968782dda58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.582s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.670089] env[62730]: DEBUG nova.objects.base [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Object Instance lazy-loaded attributes: flavor,metadata {{(pid=62730) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 790.673306] env[62730]: INFO nova.compute.manager [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Terminating instance [ 790.679136] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquiring lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.679295] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Acquired lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.679593] env[62730]: DEBUG nova.network.neutron [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 790.683833] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: a897e28b-32bc-4726-ac37-c99dc2efb75d] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 790.733453] env[62730]: DEBUG nova.compute.manager [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: a897e28b-32bc-4726-ac37-c99dc2efb75d] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 790.756671] env[62730]: DEBUG nova.network.neutron [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 790.762325] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa161ff3-470f-4cd4-943f-1f21fe871dd8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.767802] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1c6d3c8e-06fe-4d15-9391-70ab9c1156e1 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "a897e28b-32bc-4726-ac37-c99dc2efb75d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.646s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.778911] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3e6403-5fa9-4691-9d89-a9be9b42def1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.819247] env[62730]: DEBUG nova.compute.manager [None req-7e304476-8bc0-4d27-b120-7cbb36689689 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 2f233d8c-7e64-433e-82aa-ca4b1b2a1798] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 790.823194] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b77ad5-1a06-4ce2-8f19-09d13ac49e69 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.832782] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cde02ab-6334-4899-a7cc-4ffe15dceca2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.849830] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.855023] env[62730]: DEBUG nova.compute.manager [None req-7e304476-8bc0-4d27-b120-7cbb36689689 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 2f233d8c-7e64-433e-82aa-ca4b1b2a1798] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 790.863636] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.879696] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7e304476-8bc0-4d27-b120-7cbb36689689 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "2f233d8c-7e64-433e-82aa-ca4b1b2a1798" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.920s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.886575] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 790.887029] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.183s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.892905] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 790.959730] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.960035] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.961626] env[62730]: INFO nova.compute.claims [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.108874] env[62730]: DEBUG nova.network.neutron [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.118652] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Releasing lock "refresh_cache-f4408a1f-d3f2-4e1e-ba96-cd509166e31d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.119085] env[62730]: DEBUG nova.compute.manager [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 791.119282] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 791.122893] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b19888c-0fb7-4a1f-8659-8524a9db0ba3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.133627] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3da800-60bc-4f53-9a0d-dfeb86db74e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.170891] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4408a1f-d3f2-4e1e-ba96-cd509166e31d could not be found. [ 791.171049] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 791.171237] env[62730]: INFO nova.compute.manager [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 791.171511] env[62730]: DEBUG oslo.service.loopingcall [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.174622] env[62730]: DEBUG nova.compute.manager [-] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 791.174720] env[62730]: DEBUG nova.network.neutron [-] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 791.205852] env[62730]: DEBUG nova.network.neutron [-] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 791.221302] env[62730]: DEBUG nova.network.neutron [-] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.234023] env[62730]: INFO nova.compute.manager [-] [instance: f4408a1f-d3f2-4e1e-ba96-cd509166e31d] Took 0.06 seconds to deallocate network for instance. [ 791.382168] env[62730]: DEBUG oslo_concurrency.lockutils [None req-affc2228-c661-4076-b8d6-3730a38ed927 tempest-ListServersNegativeTestJSON-1509131991 tempest-ListServersNegativeTestJSON-1509131991-project-member] Lock "f4408a1f-d3f2-4e1e-ba96-cd509166e31d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.790s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.638063] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb13475-babc-4822-8347-bc4b69b55968 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.646677] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1178d5e0-5052-4125-853a-5a4ec7efb6c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.681816] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614080f2-f135-4361-9e87-cf8cb56cd364 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.691262] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3a8718-687b-49f4-b902-b25c56319254 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.708451] env[62730]: DEBUG nova.compute.provider_tree [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.725571] env[62730]: DEBUG nova.scheduler.client.report [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.751116] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.791s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.751639] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 791.816490] env[62730]: DEBUG nova.compute.utils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.821563] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 791.821563] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 791.833301] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 791.925029] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 791.959872] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.959872] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.959872] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.960064] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.960064] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.960064] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.960255] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.960427] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.960600] env[62730]: DEBUG nova.virt.hardware [None 
req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.960768] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.960941] env[62730]: DEBUG nova.virt.hardware [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.961881] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2512cec-d2c2-4c58-b47e-d629506e92da {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.971563] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c00f326-77ad-4581-81c3-de75da8f03b3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.179223] env[62730]: DEBUG nova.policy [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '207e7e672aff46ccb22fda776963775c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '984e31062b234b6ca4d2e7a42126eb64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 792.879182] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Successfully created port: 583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.113483] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89495b67-e092-4b3a-9647-ca4b5239cce4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Acquiring lock "73339a8b-3cb0-40b6-a467-e78f58902876" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.113483] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89495b67-e092-4b3a-9647-ca4b5239cce4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "73339a8b-3cb0-40b6-a467-e78f58902876" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.299948] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bcdb84af-6d73-487d-a028-95c2463e0aa3 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Acquiring lock "f945b0e5-e0a5-493f-8fe7-7b3000b1e97b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.300206] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bcdb84af-6d73-487d-a028-95c2463e0aa3 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Lock "f945b0e5-e0a5-493f-8fe7-7b3000b1e97b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.226251] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Successfully updated port: 583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.240135] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.240377] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquired lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.240377] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 794.348528] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.065338] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updating instance_info_cache with network_info: [{"id": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "address": "fa:16:3e:d9:15:d6", "network": {"id": "40d3001e-8d6d-4837-814c-0cebd0c4182d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-224260224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "984e31062b234b6ca4d2e7a42126eb64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583cbe90-c7", "ovs_interfaceid": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.067446] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "91052772-87d4-4fb3-b590-f071c0419196" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.080955] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Releasing lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.081280] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance network_info: |[{"id": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "address": "fa:16:3e:d9:15:d6", "network": {"id": "40d3001e-8d6d-4837-814c-0cebd0c4182d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-224260224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "984e31062b234b6ca4d2e7a42126eb64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583cbe90-c7", "ovs_interfaceid": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 795.081699] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:15:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '583cbe90-c70b-499d-9711-7fb8a4f7e58d', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.091156] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Creating folder: Project (984e31062b234b6ca4d2e7a42126eb64). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 795.091483] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13dab1b9-62d5-4693-bcdc-bd268b3c4c5b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.096987] env[62730]: DEBUG nova.compute.manager [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Received event network-vif-plugged-583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 795.097168] env[62730]: DEBUG oslo_concurrency.lockutils [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] Acquiring lock "91052772-87d4-4fb3-b590-f071c0419196-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.097331] env[62730]: DEBUG oslo_concurrency.lockutils [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] Lock "91052772-87d4-4fb3-b590-f071c0419196-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.097463] env[62730]: DEBUG oslo_concurrency.lockutils [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] Lock "91052772-87d4-4fb3-b590-f071c0419196-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.097693] env[62730]: DEBUG nova.compute.manager [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] No waiting events 
found dispatching network-vif-plugged-583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 795.097869] env[62730]: WARNING nova.compute.manager [req-dbf76ef3-7609-45a9-8ebc-c023b2ef97eb req-cb11d672-03c3-4e58-9ea1-beaf4bd01eb8 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Received unexpected event network-vif-plugged-583cbe90-c70b-499d-9711-7fb8a4f7e58d for instance with vm_state building and task_state deleting. [ 795.107795] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Created folder: Project (984e31062b234b6ca4d2e7a42126eb64) in parent group-v942928. [ 795.108507] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Creating folder: Instances. Parent ref: group-v942967. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 795.108808] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b24135c2-56a8-48bc-9141-56e9609f8586 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.118264] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Created folder: Instances in parent group-v942967. [ 795.118531] env[62730]: DEBUG oslo.service.loopingcall [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.118699] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 795.118904] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a23a2d42-9f2b-4b23-aa36-59ad0a964f8f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.149016] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 795.149016] env[62730]: value = "task-4837106" [ 795.149016] env[62730]: _type = "Task" [ 795.149016] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.154849] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837106, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.657483] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837106, 'name': CreateVM_Task, 'duration_secs': 0.318118} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.657818] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 795.658313] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.658424] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.658774] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 795.659625] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6259cba2-31ca-459c-a04f-ca8c5b45fa34 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.664076] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for the task: (returnval){ [ 795.664076] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5210e58a-afc9-3c30-4566-3f6d18fbc778" [ 795.664076] env[62730]: _type = "Task" [ 795.664076] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.679035] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5210e58a-afc9-3c30-4566-3f6d18fbc778, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.176586] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.176858] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.177079] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.189570] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a5ac411-17de-49a8-82fe-fcac8203df52 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Acquiring lock "76ff81c2-500f-4727-9d98-45b57f70eb3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.189792] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a5ac411-17de-49a8-82fe-fcac8203df52 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Lock "76ff81c2-500f-4727-9d98-45b57f70eb3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.084336] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9bb91aab-f736-4285-89b3-5a43c479f117 tempest-ServerActionsTestOtherA-895679453 tempest-ServerActionsTestOtherA-895679453-project-member] Acquiring lock "c6e1b57d-7dcc-4703-b7f6-e747c7a89204" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.084674] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9bb91aab-f736-4285-89b3-5a43c479f117 tempest-ServerActionsTestOtherA-895679453 tempest-ServerActionsTestOtherA-895679453-project-member] Lock "c6e1b57d-7dcc-4703-b7f6-e747c7a89204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.962797] env[62730]: DEBUG nova.compute.manager [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Received event network-changed-583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 797.963131] env[62730]: DEBUG nova.compute.manager [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Refreshing instance network info cache due to event network-changed-583cbe90-c70b-499d-9711-7fb8a4f7e58d. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 797.963401] env[62730]: DEBUG oslo_concurrency.lockutils [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] Acquiring lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.963563] env[62730]: DEBUG oslo_concurrency.lockutils [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] Acquired lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.963984] env[62730]: DEBUG nova.network.neutron [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Refreshing network info cache for port 583cbe90-c70b-499d-9711-7fb8a4f7e58d {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 798.755468] env[62730]: DEBUG nova.network.neutron [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updated VIF entry in instance network info cache for port 583cbe90-c70b-499d-9711-7fb8a4f7e58d. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 798.755731] env[62730]: DEBUG nova.network.neutron [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updating instance_info_cache with network_info: [{"id": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "address": "fa:16:3e:d9:15:d6", "network": {"id": "40d3001e-8d6d-4837-814c-0cebd0c4182d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-224260224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "984e31062b234b6ca4d2e7a42126eb64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583cbe90-c7", "ovs_interfaceid": "583cbe90-c70b-499d-9711-7fb8a4f7e58d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.771962] env[62730]: DEBUG oslo_concurrency.lockutils [req-eec6a94f-a989-4f6d-a6a8-7325b35c5d7a req-3e6a7d46-3882-44b7-946f-352b575fc522 service nova] Releasing lock 
"refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.398179] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cfb26a7c-7f53-4d88-9581-12aa0a39ea67 tempest-ServerAddressesNegativeTestJSON-1349538835 tempest-ServerAddressesNegativeTestJSON-1349538835-project-member] Acquiring lock "0d3756a1-0483-44ae-9790-11627a5b6e02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.398539] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cfb26a7c-7f53-4d88-9581-12aa0a39ea67 tempest-ServerAddressesNegativeTestJSON-1349538835 tempest-ServerAddressesNegativeTestJSON-1349538835-project-member] Lock "0d3756a1-0483-44ae-9790-11627a5b6e02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.806195] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ac8876d3-bbbc-43c3-a804-209997d1e333 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "3d002de2-7477-4d62-861f-c2bb273b9b8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.806532] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ac8876d3-bbbc-43c3-a804-209997d1e333 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "3d002de2-7477-4d62-861f-c2bb273b9b8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.646137] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4afaf63c-361a-4ca7-bc1b-cd460e1f1549 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "c104be3c-0108-468a-b99c-f0a3955d4c7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.646137] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4afaf63c-361a-4ca7-bc1b-cd460e1f1549 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "c104be3c-0108-468a-b99c-f0a3955d4c7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.512773] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0b53a35a-7c1d-45c7-bb66-00e70b27aaf7 tempest-ServersAdminNegativeTestJSON-625958026 tempest-ServersAdminNegativeTestJSON-625958026-project-member] Acquiring lock "9852654d-352a-4f6a-81b1-48d4399690e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.513191] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-0b53a35a-7c1d-45c7-bb66-00e70b27aaf7 tempest-ServersAdminNegativeTestJSON-625958026 tempest-ServersAdminNegativeTestJSON-625958026-project-member] Lock "9852654d-352a-4f6a-81b1-48d4399690e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.593629] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5dae45e9-78e7-4681-84a1-db7bb9334920 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Acquiring lock "6868e76a-17a5-41d5-81bb-e83747586ffc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.593955] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5dae45e9-78e7-4681-84a1-db7bb9334920 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "6868e76a-17a5-41d5-81bb-e83747586ffc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.167857] env[62730]: DEBUG oslo_concurrency.lockutils [None req-93fb1c9e-6820-4112-b858-4e9dbbe3522f tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Acquiring lock "ec9d765b-adb7-428c-9ab7-4a4cd90baa44" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.168497] env[62730]: DEBUG oslo_concurrency.lockutils [None req-93fb1c9e-6820-4112-b858-4e9dbbe3522f tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Lock "ec9d765b-adb7-428c-9ab7-4a4cd90baa44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.583549] env[62730]: WARNING oslo_vmware.rw_handles [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 834.583549] env[62730]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 834.583549] env[62730]: ERROR oslo_vmware.rw_handles [ 834.584310] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 834.586013] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 834.587445] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Copying Virtual Disk [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/34b7ac8c-1b9d-4a03-a19a-ae2ae80c828d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 834.589319] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b66927f0-cc44-4b3b-8869-333b9358ea0d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.597664] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for the task: (returnval){ [ 834.597664] env[62730]: value = "task-4837107" [ 834.597664] env[62730]: _type = "Task" [ 834.597664] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.606407] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Task: {'id': task-4837107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.109020] env[62730]: DEBUG oslo_vmware.exceptions [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 835.109439] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.110115] env[62730]: ERROR nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 835.110115] env[62730]: Faults: ['InvalidArgument'] [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Traceback (most recent call last): [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] yield resources [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self.driver.spawn(context, instance, image_meta, [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self._fetch_image_if_missing(context, vi) [ 835.110115] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] image_cache(vi, tmp_image_ds_loc) [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] vm_util.copy_virtual_disk( [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] session._wait_for_task(vmdk_copy_task) [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return self.wait_for_task(task_ref) [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return evt.wait() [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] result = hub.switch() [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 835.110528] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return self.greenlet.switch() [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self.f(*self.args, **self.kw) [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] raise exceptions.translate_fault(task_info.error) [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Faults: ['InvalidArgument'] [ 835.111218] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] [ 835.111218] env[62730]: INFO nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Terminating instance [ 835.112175] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.112383] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.113031] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 
tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 835.113230] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 835.113467] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-802668f1-de5f-4efa-803f-2e205eb72619 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.116606] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c10c2a9-251d-459b-9eab-c2558ba0b9ab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.127439] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 835.127678] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a2de3c0-d24f-4630-a935-1a5178645762 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.130173] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.130347] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 835.131301] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f423356c-7dfb-4ef8-9189-6da9e1523543 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.136917] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 835.136917] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b7596a-b13f-393a-6607-f93688f5351b" [ 835.136917] env[62730]: _type = "Task" [ 835.136917] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.145855] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b7596a-b13f-393a-6607-f93688f5351b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.209125] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 835.209361] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 835.209548] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Deleting the datastore file [datastore2] 318f7880-c500-40b8-9ca1-d8a857b36a88 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.209856] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a91c2930-2edc-4d9f-be68-6a2a4e02fef7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.216868] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for the task: (returnval){ [ 835.216868] env[62730]: value = "task-4837109" [ 835.216868] env[62730]: _type = "Task" [ 835.216868] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.225611] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Task: {'id': task-4837109, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.647997] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 835.648656] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating directory with path [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.648656] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40d77068-7977-488c-8209-8846e93273a0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.661128] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created directory with path [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.661330] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Fetch image to [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 835.661501] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 835.662373] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f0b610-1ab7-4ac0-8aac-c3a684df6543 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.670688] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd833ca-879c-4ae4-90b8-3ff042cd8e33 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.680738] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5b255a-021a-4c19-8774-9775206897e3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.712755] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719304ae-c957-4b14-bbec-b4741ef1465a {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.722129] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-61d5871e-4b06-4423-be88-0288dd1d5e86 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.729755] env[62730]: DEBUG oslo_vmware.api [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Task: {'id': task-4837109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073122} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.730038] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.731270] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 835.731454] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 835.731631] env[62730]: INFO nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 835.733855] env[62730]: DEBUG nova.compute.claims [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 835.734076] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.734289] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.756717] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 835.890223] env[62730]: DEBUG oslo_vmware.rw_handles [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 835.951738] env[62730]: DEBUG oslo_vmware.rw_handles [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 835.951884] env[62730]: DEBUG oslo_vmware.rw_handles [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 836.236267] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9db2ee-7c9e-4012-b339-fb37c3b7768b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.244279] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e016495e-821b-447d-b587-3dab0b148f2c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.274698] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5caab0c-9c30-481f-ace5-97336e939633 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.282586] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0369ce2d-417b-4d60-b0f9-2e1d8aaf8c07 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.296048] env[62730]: DEBUG nova.compute.provider_tree [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.304884] env[62730]: DEBUG nova.scheduler.client.report [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.320845] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.586s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.321437] env[62730]: ERROR nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 836.321437] env[62730]: Faults: ['InvalidArgument'] [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Traceback (most recent call last): [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self.driver.spawn(context, instance, image_meta, [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self._fetch_image_if_missing(context, vi) [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] image_cache(vi, tmp_image_ds_loc) [ 836.321437] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] vm_util.copy_virtual_disk( [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] session._wait_for_task(vmdk_copy_task) [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return self.wait_for_task(task_ref) [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return evt.wait() [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] result = hub.switch() [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] return self.greenlet.switch() [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 836.321828] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] self.f(*self.args, **self.kw) [ 836.322192] env[62730]: ERROR nova.compute.manager [instance: 
318f7880-c500-40b8-9ca1-d8a857b36a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 836.322192] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] raise exceptions.translate_fault(task_info.error) [ 836.322192] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 836.322192] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Faults: ['InvalidArgument'] [ 836.322192] env[62730]: ERROR nova.compute.manager [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] [ 836.322325] env[62730]: DEBUG nova.compute.utils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 836.323748] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Build of instance 318f7880-c500-40b8-9ca1-d8a857b36a88 was re-scheduled: A specified parameter was not correct: fileType [ 836.323748] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 836.324176] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 836.324361] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 836.324536] env[62730]: DEBUG nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 836.324692] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 836.868365] env[62730]: DEBUG nova.network.neutron [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.889019] env[62730]: INFO nova.compute.manager [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Took 0.56 seconds to deallocate network for instance. [ 837.000259] env[62730]: INFO nova.scheduler.client.report [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Deleted allocations for instance 318f7880-c500-40b8-9ca1-d8a857b36a88 [ 837.024129] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3ae98b24-eb35-4001-981a-591dcdfe1cd0 tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 296.884s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.025343] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 98.451s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.025560] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Acquiring lock "318f7880-c500-40b8-9ca1-d8a857b36a88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.025777] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock 
"318f7880-c500-40b8-9ca1-d8a857b36a88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.025956] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.028120] env[62730]: INFO nova.compute.manager [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Terminating instance [ 837.030151] env[62730]: DEBUG nova.compute.manager [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 837.030358] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.030805] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01969412-49dc-40f5-b791-7af408134d56 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.041714] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103967a3-6d05-44a0-ab22-9ef6c3cfa861 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.052527] env[62730]: DEBUG nova.compute.manager [None req-d7175b12-7b3e-4114-a82f-b418d676df40 tempest-ServerGroupTestJSON-1801781332 tempest-ServerGroupTestJSON-1801781332-project-member] [instance: c1658258-9147-431c-9e6d-5f8360523c23] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.077883] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 318f7880-c500-40b8-9ca1-d8a857b36a88 could not be found. 
[ 837.078127] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 837.078319] env[62730]: INFO nova.compute.manager [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Took 0.05 seconds to destroy the instance on the hypervisor. [ 837.078572] env[62730]: DEBUG oslo.service.loopingcall [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.078817] env[62730]: DEBUG nova.compute.manager [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 837.078914] env[62730]: DEBUG nova.network.neutron [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 837.098927] env[62730]: DEBUG nova.compute.manager [None req-d7175b12-7b3e-4114-a82f-b418d676df40 tempest-ServerGroupTestJSON-1801781332 tempest-ServerGroupTestJSON-1801781332-project-member] [instance: c1658258-9147-431c-9e6d-5f8360523c23] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.126281] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d7175b12-7b3e-4114-a82f-b418d676df40 tempest-ServerGroupTestJSON-1801781332 tempest-ServerGroupTestJSON-1801781332-project-member] Lock "c1658258-9147-431c-9e6d-5f8360523c23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.598s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.128353] env[62730]: DEBUG nova.network.neutron [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.136998] env[62730]: DEBUG nova.compute.manager [None req-e937e9cd-b87a-4928-b59f-c4a5024e2ad3 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 9d39c196-4ab4-4a97-9c82-44b4a4b107a8] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.139717] env[62730]: INFO nova.compute.manager [-] [instance: 318f7880-c500-40b8-9ca1-d8a857b36a88] Took 0.06 seconds to deallocate network for instance. [ 837.170863] env[62730]: DEBUG nova.compute.manager [None req-e937e9cd-b87a-4928-b59f-c4a5024e2ad3 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 9d39c196-4ab4-4a97-9c82-44b4a4b107a8] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.195631] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e937e9cd-b87a-4928-b59f-c4a5024e2ad3 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "9d39c196-4ab4-4a97-9c82-44b4a4b107a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.709s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.205900] env[62730]: DEBUG nova.compute.manager [None req-56821f9e-77d9-42b7-8043-a3b68677cfbe tempest-ServerMetadataTestJSON-1845498096 tempest-ServerMetadataTestJSON-1845498096-project-member] [instance: b7273f62-c330-4b6a-a6e3-39d76c46aac9] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.235120] env[62730]: DEBUG nova.compute.manager [None req-56821f9e-77d9-42b7-8043-a3b68677cfbe tempest-ServerMetadataTestJSON-1845498096 tempest-ServerMetadataTestJSON-1845498096-project-member] [instance: b7273f62-c330-4b6a-a6e3-39d76c46aac9] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.262316] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1788cb79-8505-44a8-81a6-bcf1f7b8a64b tempest-AttachInterfacesUnderV243Test-1647568674 tempest-AttachInterfacesUnderV243Test-1647568674-project-member] Lock "318f7880-c500-40b8-9ca1-d8a857b36a88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.237s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.264220] env[62730]: DEBUG oslo_concurrency.lockutils [None req-56821f9e-77d9-42b7-8043-a3b68677cfbe tempest-ServerMetadataTestJSON-1845498096 tempest-ServerMetadataTestJSON-1845498096-project-member] Lock "b7273f62-c330-4b6a-a6e3-39d76c46aac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.248s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.276719] env[62730]: DEBUG nova.compute.manager [None req-18c2580f-111a-47f9-a656-a0a19dd93958 tempest-ServerExternalEventsTest-1788746039 tempest-ServerExternalEventsTest-1788746039-project-member] [instance: 1dc56d8a-02e3-4441-9bb5-f091ecac835a] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.299329] env[62730]: DEBUG nova.compute.manager [None req-18c2580f-111a-47f9-a656-a0a19dd93958 tempest-ServerExternalEventsTest-1788746039 tempest-ServerExternalEventsTest-1788746039-project-member] [instance: 1dc56d8a-02e3-4441-9bb5-f091ecac835a] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.321525] env[62730]: DEBUG oslo_concurrency.lockutils [None req-18c2580f-111a-47f9-a656-a0a19dd93958 tempest-ServerExternalEventsTest-1788746039 tempest-ServerExternalEventsTest-1788746039-project-member] Lock "1dc56d8a-02e3-4441-9bb5-f091ecac835a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.352s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.333968] env[62730]: DEBUG nova.compute.manager [None req-a599fe64-53d8-4a9b-a264-5b4b276cf7e1 tempest-ServersNegativeTestMultiTenantJSON-1706258361 tempest-ServersNegativeTestMultiTenantJSON-1706258361-project-member] [instance: d8656d64-907d-4524-905a-aa67a4ad1f63] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.374596] env[62730]: DEBUG nova.compute.manager [None req-a599fe64-53d8-4a9b-a264-5b4b276cf7e1 tempest-ServersNegativeTestMultiTenantJSON-1706258361 tempest-ServersNegativeTestMultiTenantJSON-1706258361-project-member] [instance: d8656d64-907d-4524-905a-aa67a4ad1f63] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.399879] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a599fe64-53d8-4a9b-a264-5b4b276cf7e1 tempest-ServersNegativeTestMultiTenantJSON-1706258361 tempest-ServersNegativeTestMultiTenantJSON-1706258361-project-member] Lock "d8656d64-907d-4524-905a-aa67a4ad1f63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.820s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.412042] env[62730]: DEBUG nova.compute.manager [None req-8d294b59-eedf-4a43-9926-42699b612df4 tempest-ServerAddressesTestJSON-2073240203 tempest-ServerAddressesTestJSON-2073240203-project-member] [instance: 6d8cad34-699c-4dcc-8f83-e21490f82b8d] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.441680] env[62730]: DEBUG nova.compute.manager [None req-8d294b59-eedf-4a43-9926-42699b612df4 tempest-ServerAddressesTestJSON-2073240203 tempest-ServerAddressesTestJSON-2073240203-project-member] [instance: 6d8cad34-699c-4dcc-8f83-e21490f82b8d] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.478174] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8d294b59-eedf-4a43-9926-42699b612df4 tempest-ServerAddressesTestJSON-2073240203 tempest-ServerAddressesTestJSON-2073240203-project-member] Lock "6d8cad34-699c-4dcc-8f83-e21490f82b8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.462s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.488223] env[62730]: DEBUG nova.compute.manager [None req-d887d11b-fd41-49f2-86d2-73e73b305897 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: cb5b5e10-41e5-497e-b409-0a83fa00896c] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.513470] env[62730]: DEBUG nova.compute.manager [None req-d887d11b-fd41-49f2-86d2-73e73b305897 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: cb5b5e10-41e5-497e-b409-0a83fa00896c] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.534572] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d887d11b-fd41-49f2-86d2-73e73b305897 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "cb5b5e10-41e5-497e-b409-0a83fa00896c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.502s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.543960] env[62730]: DEBUG nova.compute.manager [None req-c60a7df4-bcf2-48f7-9bb6-8ba782782026 tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] [instance: 91907341-29ff-42b3-a25f-a9e990af4de1] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.567831] env[62730]: DEBUG nova.compute.manager [None req-c60a7df4-bcf2-48f7-9bb6-8ba782782026 tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] [instance: 91907341-29ff-42b3-a25f-a9e990af4de1] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 837.594036] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c60a7df4-bcf2-48f7-9bb6-8ba782782026 tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Lock "91907341-29ff-42b3-a25f-a9e990af4de1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.778s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.608918] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 837.679885] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.680199] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.681724] env[62730]: INFO nova.compute.claims [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.134016] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d57bdf7-faf1-4d2d-b627-8764b0484b3a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.141472] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f24a98-2fea-48d5-be09-b650f5b7c612 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.173155] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13124ba9-21f3-4422-8a2d-6d7da64ccedd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.181287] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92accd9-d3eb-4984-ac31-33d4be594ae9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.195686] env[62730]: DEBUG nova.compute.provider_tree [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.206156] env[62730]: DEBUG nova.scheduler.client.report [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 838.226697] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.546s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.227259] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 838.272608] env[62730]: DEBUG nova.compute.utils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.274053] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 838.274282] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 838.284888] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 838.373675] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 838.403215] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 838.403480] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 838.403710] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.403940] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 838.405763] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.405763] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 838.405763] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 838.405763] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
838.405763] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 838.406060] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 838.406060] env[62730]: DEBUG nova.virt.hardware [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 838.406485] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03d1dbe-eaae-423e-9d78-87daf5d98349 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.415626] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395473a0-acf8-4b47-893b-04d9041e8ad3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.442200] env[62730]: DEBUG nova.policy [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '686db289143541d1a4aced35036b4cd9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0dc4f70a095944708ebe176443cc2134', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 839.573141] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Successfully created port: 9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.355932] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Successfully updated port: 9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.379758] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.379915] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquired lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.380086] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 841.508255] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 841.769029] env[62730]: DEBUG nova.compute.manager [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Received event network-vif-plugged-9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 841.769295] env[62730]: DEBUG oslo_concurrency.lockutils [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] Acquiring lock "04ba035f-97b6-49d1-8506-35f7d6fccb03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.769523] env[62730]: DEBUG oslo_concurrency.lockutils [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.769732] env[62730]: DEBUG oslo_concurrency.lockutils [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.769904] env[62730]: DEBUG nova.compute.manager [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] No waiting events found dispatching network-vif-plugged-9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.770156] env[62730]: WARNING nova.compute.manager [req-a0c4aa69-9e95-446d-bb51-fca39593998c req-4e237e6f-c980-4376-8eab-9129cbf4d51f service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Received unexpected event network-vif-plugged-9ff57aa7-29a6-466e-90e8-11785c01bb6a for instance with vm_state building and task_state spawning. 
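The records above show the pattern Nova's compute manager uses for external events: the handler takes a per-instance "<uuid>-events" lock, pops the waiter registered for that event, and emits the "Received unexpected event" warning when nothing was waiting (as happened here, since the spawn path had not yet registered for network-vif-plugged). Below is a minimal, self-contained sketch of that lock-guarded register/pop pattern, not Nova's actual implementation: the _events store and both function names are hypothetical illustrations; only oslo_concurrency.lockutils and the lock-name convention visible in the log are taken from the source.

    import threading

    from oslo_concurrency import lockutils

    # Hypothetical store: {instance_uuid: {event_name: threading.Event()}}
    _events = {}

    def prepare_for_event(instance_uuid, event_name):
        # Register interest *before* kicking off the async work (e.g. port
        # binding), so a fast event cannot arrive with no waiter in place.
        with lockutils.lock(instance_uuid + '-events'):
            waiter = threading.Event()
            _events.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def pop_instance_event(instance_uuid, event_name):
        # Handler side: same lock name, so registration and pop never
        # interleave. Returns None when nobody registered -- the
        # "unexpected event" case logged in the records above.
        with lockutils.lock(instance_uuid + '-events'):
            return _events.get(instance_uuid, {}).pop(event_name, None)

    # Handler side, using the port/instance IDs from this log for flavor:
    waiter = pop_instance_event(
        '04ba035f-97b6-49d1-8506-35f7d6fccb03',
        'network-vif-plugged-9ff57aa7-29a6-466e-90e8-11785c01bb6a')
    if waiter is not None:
        waiter.set()  # wakes the thread blocked in waiter.wait(timeout)

In the run recorded here pop_instance_event would return None, which is why the WARNING fires while the instance is still in vm_state building / task_state spawning.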
[ 842.253214] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Updating instance_info_cache with network_info: [{"id": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "address": "fa:16:3e:d3:a0:81", "network": {"id": "ae7e843f-2c2d-43e1-84c4-42951db959d6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-217164123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dc4f70a095944708ebe176443cc2134", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff57aa7-29", "ovs_interfaceid": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.273481] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Releasing lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.273481] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance network_info: |[{"id": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "address": "fa:16:3e:d3:a0:81", "network": {"id": "ae7e843f-2c2d-43e1-84c4-42951db959d6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-217164123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dc4f70a095944708ebe176443cc2134", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff57aa7-29", "ovs_interfaceid": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.273768] env[62730]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:a0:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ff57aa7-29a6-466e-90e8-11785c01bb6a', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.282827] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Creating folder: Project (0dc4f70a095944708ebe176443cc2134). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 842.284478] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00cf3264-1fbc-42bb-932d-a6c0b1aceaa7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.298947] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Created folder: Project (0dc4f70a095944708ebe176443cc2134) in parent group-v942928. [ 842.299793] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Creating folder: Instances. Parent ref: group-v942970. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 842.300399] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8637469-1d74-445e-a7f0-cd1fad3efbc5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.314322] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Created folder: Instances in parent group-v942970. [ 842.314322] env[62730]: DEBUG oslo.service.loopingcall [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.314322] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 842.314322] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10c6e581-d9bc-40ba-ae2c-f1a91430e598 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.337243] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.337243] env[62730]: value = "task-4837112" [ 842.337243] env[62730]: _type = "Task" [ 842.337243] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.347567] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837112, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.850610] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837112, 'name': CreateVM_Task, 'duration_secs': 0.336501} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.851036] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 842.854513] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.854762] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.855163] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.855434] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65be0bc-1d4d-4a53-b38d-a1da40996644 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.861693] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for the task: (returnval){ [ 842.861693] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b29bd0-7597-0a2b-125d-543696592bdc" [ 842.861693] env[62730]: _type = "Task" [ 842.861693] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.877501] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b29bd0-7597-0a2b-125d-543696592bdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.374205] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.374474] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.374737] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.782746] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.820918] env[62730]: DEBUG nova.compute.manager [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Received event network-changed-9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 844.821212] env[62730]: DEBUG nova.compute.manager [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Refreshing instance network info cache due to event network-changed-9ff57aa7-29a6-466e-90e8-11785c01bb6a. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 844.821349] env[62730]: DEBUG oslo_concurrency.lockutils [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] Acquiring lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.821498] env[62730]: DEBUG oslo_concurrency.lockutils [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] Acquired lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.821671] env[62730]: DEBUG nova.network.neutron [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Refreshing network info cache for port 9ff57aa7-29a6-466e-90e8-11785c01bb6a {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 844.883162] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.916503] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 845.596930] env[62730]: DEBUG nova.network.neutron [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Updated VIF entry in instance network info cache for port 9ff57aa7-29a6-466e-90e8-11785c01bb6a. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 845.596930] env[62730]: DEBUG nova.network.neutron [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Updating instance_info_cache with network_info: [{"id": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "address": "fa:16:3e:d3:a0:81", "network": {"id": "ae7e843f-2c2d-43e1-84c4-42951db959d6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-217164123-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0dc4f70a095944708ebe176443cc2134", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff57aa7-29", "ovs_interfaceid": "9ff57aa7-29a6-466e-90e8-11785c01bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.608658] env[62730]: DEBUG oslo_concurrency.lockutils [req-3aeac4c0-421b-484b-acf0-3d7c08fbb472 req-6d3e46d1-0740-472e-8c74-8ef7f555f998 service nova] Releasing lock "refresh_cache-04ba035f-97b6-49d1-8506-35f7d6fccb03" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.738781] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.740173] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.740173] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.740173] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.740745] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.740745] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting 
heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 848.740745] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 848.765801] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766021] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766105] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766233] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766355] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766474] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766592] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766718] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766838] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.766952] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 848.767080] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 849.565236] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.565553] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.737152] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.737422] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.749722] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.750093] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.750141] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.750298] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 849.752134] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e8376c-0e44-4912-882d-f8e5355bd8c9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.763190] env[62730]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eefb7fb-a5d7-4326-862a-f2d123da51da {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.782850] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b79acc-c2e0-4218-92c0-9be3593b4f6f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.791313] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f4ee43-6ae3-42b2-bf56-26a2a3713a71 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.828264] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180563MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 849.828694] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.828757] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.957136] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16f7dfdb-2063-4992-9f40-4b332006940f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957273] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957404] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957529] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957650] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957770] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.957888] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.958015] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.958189] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.958309] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 849.976812] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 849.992018] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.008433] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8504a95d-6003-4698-a3b5-4913eb59c932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.030802] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 13fb51cc-7cfc-44f9-9a15-381762007fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.045552] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cced1efc-f73f-43a5-8a13-de65ef5703b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.089819] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.106863] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 73339a8b-3cb0-40b6-a467-e78f58902876 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.120022] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f945b0e5-e0a5-493f-8fe7-7b3000b1e97b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.134017] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 76ff81c2-500f-4727-9d98-45b57f70eb3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.148450] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c6e1b57d-7dcc-4703-b7f6-e747c7a89204 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.168973] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0d3756a1-0483-44ae-9790-11627a5b6e02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.182990] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c104be3c-0108-468a-b99c-f0a3955d4c7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.195018] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9852654d-352a-4f6a-81b1-48d4399690e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.207802] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6868e76a-17a5-41d5-81bb-e83747586ffc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.222082] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ec9d765b-adb7-428c-9ab7-4a4cd90baa44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.233802] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 850.234169] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 850.234345] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '23', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '2', 'num_os_type_None': '10', 'num_proj_cf705f506bcc4409881416d80a745afc': '2', 'io_workload': '10', 'num_task_deleting': '8', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 850.673019] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2943d2f-cbab-4c67-bbd7-77e8696f5dc8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.687921] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8aeb4b0-c221-40ae-8909-cc3558235a08 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.722235] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7342fde-3763-4adb-ac04-6b45e7fd2664 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.731251] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095f4cd1-ac0b-4765-a1e3-0e2eea2015cb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.747252] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.758782] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.779192] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 850.779547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.951s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.667395] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d117a17-01b8-428b-838c-302cb0d3cbf5 tempest-FloatingIPsAssociationTestJSON-661442993 tempest-FloatingIPsAssociationTestJSON-661442993-project-member] Acquiring lock "f0be97b5-35e3-4c67-96f6-c604a71c38b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.667984] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d117a17-01b8-428b-838c-302cb0d3cbf5 tempest-FloatingIPsAssociationTestJSON-661442993 tempest-FloatingIPsAssociationTestJSON-661442993-project-member] Lock "f0be97b5-35e3-4c67-96f6-c604a71c38b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.780663] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.780663] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 859.755546] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fecfe18-6bc6-4822-a5e4-23062e9bbed2 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Acquiring lock "6ab13a84-4fcf-451a-a8d7-79ec54af27da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.755872] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fecfe18-6bc6-4822-a5e4-23062e9bbed2 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Lock "6ab13a84-4fcf-451a-a8d7-79ec54af27da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.062790] env[62730]: WARNING oslo_vmware.rw_handles [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 883.062790] env[62730]: ERROR oslo_vmware.rw_handles [ 883.063773] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 883.065479] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 883.065757] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 
tempest-ServersAdminTestJSON-491589472-project-member] Copying Virtual Disk [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/fb1818eb-4c9e-49c3-b4a0-6daf45335f7f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 883.066077] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98e3c42a-7cd1-4d94-bb6c-f154577874bb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.078031] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 883.078031] env[62730]: value = "task-4837113" [ 883.078031] env[62730]: _type = "Task" [ 883.078031] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.087865] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.587418] env[62730]: DEBUG oslo_vmware.exceptions [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 883.587734] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.588311] env[62730]: ERROR nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.588311] env[62730]: Faults: ['InvalidArgument'] [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Traceback (most recent call last): [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] yield resources [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self.driver.spawn(context, instance, image_meta, [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 
16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self._fetch_image_if_missing(context, vi) [ 883.588311] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] image_cache(vi, tmp_image_ds_loc) [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] vm_util.copy_virtual_disk( [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] session._wait_for_task(vmdk_copy_task) [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return self.wait_for_task(task_ref) [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return evt.wait() [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] result = hub.switch() [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 883.588724] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return self.greenlet.switch() [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self.f(*self.args, **self.kw) [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] raise 
exceptions.translate_fault(task_info.error) [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Faults: ['InvalidArgument'] [ 883.589161] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] [ 883.589161] env[62730]: INFO nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Terminating instance [ 883.590303] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.590515] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.590762] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed639fe1-25e6-44b1-b797-973ab6585b11 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.593192] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 883.593398] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.594212] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d375cf-8769-44c3-81b9-5cd859aa252b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.603577] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 883.603933] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8484675e-7acd-40f9-b595-b34010ab1b62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.606619] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.606803] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 883.607866] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3197a30b-9bff-43aa-977a-9704664cb753 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.613841] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 883.613841] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5278cf50-ddd6-25c2-8cb5-b3a4bc7d30b0" [ 883.613841] env[62730]: _type = "Task" [ 883.613841] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.622664] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5278cf50-ddd6-25c2-8cb5-b3a4bc7d30b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.682275] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 883.682567] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 883.682691] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleting the datastore file [datastore2] 16f7dfdb-2063-4992-9f40-4b332006940f {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.682938] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61853804-bfeb-4c86-9f1b-491f565c0b05 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.692029] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 883.692029] env[62730]: value = "task-4837115" [ 883.692029] env[62730]: _type = "Task" [ 883.692029] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.701687] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.126640] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 884.127059] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating directory with path [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.127215] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bd5cea9-a370-4674-b9d9-5d4ae05e4d1b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.141260] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Created directory with path [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.141522] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Fetch image to [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 884.141822] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 884.142696] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27725192-bb46-4298-9c7e-c421babda05b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.150996] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce4f2b5-0e49-4a02-8a50-e60cbcd3f2d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.164018] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d8a0a4-a3f7-47c6-9d7a-1bc57de09f7e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.199638] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c984e982-6883-49d0-8b80-7ead49e97d50 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.209385] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89b0f335-67ff-4bcb-8c5f-f8c28331d1f2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.211380] env[62730]: DEBUG oslo_vmware.api [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085779} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.211755] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.211978] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 884.212177] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 884.212356] env[62730]: INFO nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Took 0.62 seconds to destroy the instance on the hypervisor. 
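
The delete sequence just logged (invoke FileManager.DeleteDatastoreFile_Task, then poll until "completed successfully") is the standard oslo.vmware call-and-wait pattern, and it is the same path that surfaced the earlier CopyVirtualDisk_Task failure. Below is a minimal sketch of that pattern, assuming a reachable vCenter and using placeholder credentials; the helper name is ours, but invoke_api() and wait_for_task() are the oslo_vmware.api.VMwareAPISession methods visible in the logged file paths:

```python
# Sketch only: placeholder host/credentials. wait_for_task() polls the task
# in a looping call (oslo_vmware/api.py _poll_task, as logged above) and
# raises a translated fault if the task ends in the 'error' state.
from oslo_vmware import api

def delete_datastore_file(session, ds_path, datacenter=None):
    """Issue FileManager.DeleteDatastoreFile_Task and wait for completion."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter)
    return session.wait_for_task(task)

if __name__ == '__main__':
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',        # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)
    delete_datastore_file(
        session, '[datastore2] 16f7dfdb-2063-4992-9f40-4b332006940f')
```

The error side of the same path explains the InvalidArgument traceback above: _poll_task calls exceptions.translate_fault(task_info.error), so the caller sees an oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType") rather than a raw SOAP fault.
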
[ 884.214708] env[62730]: DEBUG nova.compute.claims [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 884.214910] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.215145] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.236912] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 884.310609] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 884.382297] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 884.382492] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 884.735943] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bdf829-932d-4899-b3fb-bc42a50259c7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.744366] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dc9b55-c65f-4574-8d2c-980d3537c540 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.775636] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de894a07-695b-439d-8073-147580d97b6a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.784047] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f519a8-4ab9-4210-974d-11abb19694c9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.798037] env[62730]: DEBUG nova.compute.provider_tree [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.806663] env[62730]: DEBUG nova.scheduler.client.report [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.825409] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.825963] env[62730]: ERROR nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 884.825963] env[62730]: Faults: ['InvalidArgument'] [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Traceback (most recent call last): [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 
16f7dfdb-2063-4992-9f40-4b332006940f] self.driver.spawn(context, instance, image_meta, [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self._fetch_image_if_missing(context, vi) [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] image_cache(vi, tmp_image_ds_loc) [ 884.825963] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] vm_util.copy_virtual_disk( [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] session._wait_for_task(vmdk_copy_task) [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return self.wait_for_task(task_ref) [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return evt.wait() [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] result = hub.switch() [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] return self.greenlet.switch() [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 884.826354] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] self.f(*self.args, **self.kw) [ 884.826701] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 884.826701] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] raise exceptions.translate_fault(task_info.error) [ 884.826701] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 884.826701] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Faults: ['InvalidArgument'] [ 884.826701] env[62730]: ERROR nova.compute.manager [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] [ 884.826838] env[62730]: DEBUG nova.compute.utils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 884.830843] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Build of instance 16f7dfdb-2063-4992-9f40-4b332006940f was re-scheduled: A specified parameter was not correct: fileType [ 884.830843] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 884.831226] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 884.831408] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 884.831580] env[62730]: DEBUG nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 884.831770] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 885.355470] env[62730]: DEBUG nova.network.neutron [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.370162] env[62730]: INFO nova.compute.manager [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: 16f7dfdb-2063-4992-9f40-4b332006940f] Took 0.54 seconds to deallocate network for instance. [ 885.507894] env[62730]: INFO nova.scheduler.client.report [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleted allocations for instance 16f7dfdb-2063-4992-9f40-4b332006940f [ 885.539359] env[62730]: DEBUG oslo_concurrency.lockutils [None req-97036a29-1790-42c1-bdcd-dbb6ff737559 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "16f7dfdb-2063-4992-9f40-4b332006940f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 341.574s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.568219] env[62730]: DEBUG nova.compute.manager [None req-89c0b534-6c53-4d11-9023-c0941cd4b1b8 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 4cefd92c-8058-4e3e-a175-4807a84e0b3d] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.613543] env[62730]: DEBUG nova.compute.manager [None req-89c0b534-6c53-4d11-9023-c0941cd4b1b8 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 4cefd92c-8058-4e3e-a175-4807a84e0b3d] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.644942] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89c0b534-6c53-4d11-9023-c0941cd4b1b8 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "4cefd92c-8058-4e3e-a175-4807a84e0b3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 245.115s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.655923] env[62730]: DEBUG nova.compute.manager [None req-3f699b23-b687-4fdd-a972-63acb361c320 tempest-ServersNegativeTestJSON-202746956 tempest-ServersNegativeTestJSON-202746956-project-member] [instance: 45bb8da2-c544-4935-a3a8-62305e599c06] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.683957] env[62730]: DEBUG nova.compute.manager [None req-3f699b23-b687-4fdd-a972-63acb361c320 tempest-ServersNegativeTestJSON-202746956 tempest-ServersNegativeTestJSON-202746956-project-member] [instance: 45bb8da2-c544-4935-a3a8-62305e599c06] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.712658] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3f699b23-b687-4fdd-a972-63acb361c320 tempest-ServersNegativeTestJSON-202746956 tempest-ServersNegativeTestJSON-202746956-project-member] Lock "45bb8da2-c544-4935-a3a8-62305e599c06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.978s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.729497] env[62730]: DEBUG nova.compute.manager [None req-4df8e47e-4873-4a79-bbb1-ce4b48a512db tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 0e7106c5-fca6-4d97-a6dd-f0670ca42202] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.756832] env[62730]: DEBUG nova.compute.manager [None req-4df8e47e-4873-4a79-bbb1-ce4b48a512db tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 0e7106c5-fca6-4d97-a6dd-f0670ca42202] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.782497] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4df8e47e-4873-4a79-bbb1-ce4b48a512db tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "0e7106c5-fca6-4d97-a6dd-f0670ca42202" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.274s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.793106] env[62730]: DEBUG nova.compute.manager [None req-e95cd872-f602-4f1d-a989-8ca7023305c6 tempest-TenantUsagesTestJSON-1260001862 tempest-TenantUsagesTestJSON-1260001862-project-member] [instance: 3d3b40de-1123-44ed-b241-746731c3097c] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.819554] env[62730]: DEBUG nova.compute.manager [None req-e95cd872-f602-4f1d-a989-8ca7023305c6 tempest-TenantUsagesTestJSON-1260001862 tempest-TenantUsagesTestJSON-1260001862-project-member] [instance: 3d3b40de-1123-44ed-b241-746731c3097c] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.845260] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e95cd872-f602-4f1d-a989-8ca7023305c6 tempest-TenantUsagesTestJSON-1260001862 tempest-TenantUsagesTestJSON-1260001862-project-member] Lock "3d3b40de-1123-44ed-b241-746731c3097c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.197s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.863161] env[62730]: DEBUG nova.compute.manager [None req-4a4512d4-ea29-48df-87a9-c405e7224534 tempest-ServerTagsTestJSON-360681645 tempest-ServerTagsTestJSON-360681645-project-member] [instance: 7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.889314] env[62730]: DEBUG nova.compute.manager [None req-4a4512d4-ea29-48df-87a9-c405e7224534 tempest-ServerTagsTestJSON-360681645 tempest-ServerTagsTestJSON-360681645-project-member] [instance: 7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 885.916243] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a4512d4-ea29-48df-87a9-c405e7224534 tempest-ServerTagsTestJSON-360681645 tempest-ServerTagsTestJSON-360681645-project-member] Lock "7b8518d4-6d0a-4ba1-b95b-86e8e6774dfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.057s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.927396] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 885.988423] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.988690] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.990364] env[62730]: INFO nova.compute.claims [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.406957] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a3f33d-afbc-4f12-be09-2b8597e1191f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.415395] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6560effb-42be-4be2-91ed-dc41a9ac81f9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.446351] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bd8d6b-3c13-4677-9a66-6c0a1a756515 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.454335] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a92cd87-dcd3-43d8-8f7f-426a1ccdcb0c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.469892] env[62730]: DEBUG nova.compute.provider_tree [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.482092] env[62730]: DEBUG nova.scheduler.client.report [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.497726] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.509s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.498266] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 886.543354] env[62730]: DEBUG nova.compute.utils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.545337] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 886.545620] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.556520] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 886.625150] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 886.655327] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 886.655579] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 886.655738] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.655920] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 886.656302] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.656493] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 886.656754] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 886.656959] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
886.657105] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 886.660601] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 886.660877] env[62730]: DEBUG nova.virt.hardware [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 886.662158] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1fc03c-fe63-4940-a958-6756825af645 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.666488] env[62730]: DEBUG nova.policy [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ec4bf5f7e104e0d8eae7bdd98861641', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ae994dbceb044ef8c023cb31350f1ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 886.673768] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64589178-3b4d-48c8-8f62-1f5b270df9d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.362226] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Successfully created port: 6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.115636] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.616595] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Successfully updated port: 6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 888.631575] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.631852] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.632043] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.740011] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.328111] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Updating instance_info_cache with network_info: [{"id": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "address": "fa:16:3e:fe:89:1e", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec47e1b-ed", "ovs_interfaceid": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.349792] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.350132] env[62730]: DEBUG nova.compute.manager 
[None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance network_info: |[{"id": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "address": "fa:16:3e:fe:89:1e", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec47e1b-ed", "ovs_interfaceid": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.350546] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:89:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4349e30-c086-4c24-9e0e-83996d808a1b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ec47e1b-edbd-459e-a11d-cca8ecb06110', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 889.358849] env[62730]: DEBUG oslo.service.loopingcall [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.360549] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 889.361631] env[62730]: DEBUG nova.compute.manager [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Received event network-vif-plugged-6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 889.361842] env[62730]: DEBUG oslo_concurrency.lockutils [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] Acquiring lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.362051] env[62730]: DEBUG oslo_concurrency.lockutils [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.362218] env[62730]: DEBUG oslo_concurrency.lockutils [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.363986] env[62730]: DEBUG nova.compute.manager [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] No waiting events found dispatching network-vif-plugged-6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 889.363986] env[62730]: WARNING nova.compute.manager [req-bb2a70aa-51a2-4230-939d-5e2badd0ec03 req-294d4396-d023-4686-b4b8-59cf4baff369 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Received unexpected event network-vif-plugged-6ec47e1b-edbd-459e-a11d-cca8ecb06110 for instance with vm_state building and task_state deleting. [ 889.363986] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42bfb31d-17f9-473b-af3c-775b2a82d06c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.384738] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.384738] env[62730]: value = "task-4837116" [ 889.384738] env[62730]: _type = "Task" [ 889.384738] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.394623] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837116, 'name': CreateVM_Task} progress is 0%. 
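The oslo_vmware.api entries here show the driver handing CreateVM_Task (task-4837116) to vCenter and then polling it until done; the matching "completed successfully" record follows below. A minimal, self-contained sketch of that poll-until-done loop — FakeTask and the half-second interval are illustrative stand-ins, not Nova or oslo.vmware code:

    # Mock of the wait_for_task/_poll_task pattern recorded in this log.
    import time

    class FakeTask:
        """Mimics a vCenter task that finishes after a few polls."""
        def __init__(self, task_id, name, polls_to_finish=3):
            self.info = {'id': task_id, 'name': name}
            self._polls_left = polls_to_finish

        def poll(self):
            self._polls_left -= 1
            if self._polls_left > 0:
                return ('running', 0)      # state, progress %
            return ('success', 100)

    def wait_for_task(task, interval=0.5):
        """Poll until the task reports success, like _poll_task above."""
        while True:
            state, progress = task.poll()
            print("Task: %s progress is %d%%." % (task.info, progress))
            if state == 'success':
                return
            time.sleep(interval)

    wait_for_task(FakeTask('task-4837116', 'CreateVM_Task'))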
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.895748] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837116, 'name': CreateVM_Task, 'duration_secs': 0.320891} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.895993] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 889.896597] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.896768] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.897096] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.897356] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd093c35-0f81-462c-b2bc-218e88fa2f78 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.902698] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 889.902698] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ad53e9-93ca-11fb-1e56-f1ccb610730d" [ 889.902698] env[62730]: _type = "Task" [ 889.902698] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.910986] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ad53e9-93ca-11fb-1e56-f1ccb610730d, 'name': SearchDatastore_Task} progress is 0%. 
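Before touching the cached image, the driver takes a lock named after the datastore path ("[datastore2] devstack-image-cache_base/a46adab9-..."), so all work on that one image is serialized; the matching "Releasing lock" record appears just below. A toy stand-in for that named-lock pattern built on threading rather than oslo.concurrency:

    # Hypothetical per-name lock registry; not Nova's or oslo's code.
    import threading

    _locks = {}
    _registry_guard = threading.Lock()

    def named_lock(name):
        """Return the single lock object associated with `name`."""
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    lock_name = ("[datastore2] devstack-image-cache_base/"
                 "a46adab9-3ef5-4b2e-8d44-bab77576ed71")
    with named_lock(lock_name):
        print('Acquired lock "%s"' % lock_name)
    print('Released lock "%s"' % lock_name)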
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.413273] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.413759] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.413759] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.508445] env[62730]: DEBUG nova.compute.manager [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Received event network-changed-6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 891.508738] env[62730]: DEBUG nova.compute.manager [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Refreshing instance network info cache due to event network-changed-6ec47e1b-edbd-459e-a11d-cca8ecb06110. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 891.508851] env[62730]: DEBUG oslo_concurrency.lockutils [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] Acquiring lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.508993] env[62730]: DEBUG oslo_concurrency.lockutils [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] Acquired lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.509181] env[62730]: DEBUG nova.network.neutron [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Refreshing network info cache for port 6ec47e1b-edbd-459e-a11d-cca8ecb06110 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 892.036577] env[62730]: DEBUG nova.network.neutron [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Updated VIF entry in instance network info cache for port 6ec47e1b-edbd-459e-a11d-cca8ecb06110. 
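The instance_info_cache blobs logged above are plain JSON-like structures; everything Nova needs downstream (port id, MAC address, fixed IPs) is nested inside them. A short sketch that pulls those fields out of one VIF entry, with the dict literal abridged from the cache entry in this very log:

    # Abridged copy of one network_info element from the log above.
    network_info = [{
        "id": "6ec47e1b-edbd-459e-a11d-cca8ecb06110",
        "address": "fa:16:3e:fe:89:1e",
        "network": {
            "id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "gateway": {"address": "192.168.233.1"},
                "ips": [{"address": "192.168.233.125", "type": "fixed"}],
            }],
        },
        "devname": "tap6ec47e1b-ed",
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed)  # port id, MAC, fixed IPs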
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 892.036937] env[62730]: DEBUG nova.network.neutron [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Updating instance_info_cache with network_info: [{"id": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "address": "fa:16:3e:fe:89:1e", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec47e1b-ed", "ovs_interfaceid": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.046835] env[62730]: DEBUG oslo_concurrency.lockutils [req-73ec97fe-1d5f-403d-b6d0-c29f628e7031 req-987a1c2f-6759-422e-87ba-c117c5482f76 service nova] Releasing lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.738271] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.739617] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.739934] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.740045] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.738458] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.738718] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 908.738938] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 908.766664] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.766962] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.766962] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767102] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767222] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767347] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767472] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767597] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767805] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767878] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. 
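Each skip message here comes from the heal task walking its candidate list and passing over anything whose vm_state is still "building"; with every candidate skipped, the pass ends with the "Didn't find any instances" record just below. A hypothetical reduction of that loop (not Nova's actual helper), using two instance UUIDs from the log:

    instances = [
        {"uuid": "d8ac549d-b27c-4d4a-a58b-de65bb5586f3", "vm_state": "building"},
        {"uuid": "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856", "vm_state": "building"},
    ]

    def pick_instance_to_heal(instances):
        """Return the first instance eligible for a cache refresh, if any."""
        for inst in instances:
            if inst["vm_state"] == "building":
                print("[instance: %s] Skipping network cache update for "
                      "instance because it is Building." % inst["uuid"])
                continue
            return inst
        print("Didn't find any instances for network info cache update.")
        return None

    pick_instance_to_heal(instances)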
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 908.767939] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 909.736943] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.733068] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.736622] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.749628] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.749901] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.750061] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.750251] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 910.751636] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9357c52-b151-4515-b9bb-5225c6796d96 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.762564] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb5e4f9-b150-49c3-a527-3350ce665eca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.776793] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c5e43b-be10-4ab1-85a8-0a11eed43df5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.783327] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0951a826-f101-4fcc-b395-3f41498a2d3f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.812096] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180538MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 910.812233] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.812430] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.886361] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.886533] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.886667] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.886795] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.886917] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.887057] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.887185] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.887304] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.887421] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.887538] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 910.898955] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.910110] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8504a95d-6003-4698-a3b5-4913eb59c932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.920789] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 13fb51cc-7cfc-44f9-9a15-381762007fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.931171] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cced1efc-f73f-43a5-8a13-de65ef5703b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.940972] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.951138] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 73339a8b-3cb0-40b6-a467-e78f58902876 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.961221] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f945b0e5-e0a5-493f-8fe7-7b3000b1e97b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.970983] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 76ff81c2-500f-4727-9d98-45b57f70eb3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.981642] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c6e1b57d-7dcc-4703-b7f6-e747c7a89204 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 910.991940] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0d3756a1-0483-44ae-9790-11627a5b6e02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.002178] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c104be3c-0108-468a-b99c-f0a3955d4c7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.013875] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9852654d-352a-4f6a-81b1-48d4399690e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.026639] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6868e76a-17a5-41d5-81bb-e83747586ffc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.037601] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ec9d765b-adb7-428c-9ab7-4a4cd90baa44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.048217] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.058254] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f0be97b5-35e3-4c67-96f6-c604a71c38b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.068441] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6ab13a84-4fcf-451a-a8d7-79ec54af27da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
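Every allocation listed above is the same {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. With ten actively managed instances and the 512 MB of reserved host RAM shown in the inventory record below, the tracker's final figures (used_ram=1792MB, used_disk=10GB, used_vcpus=10) are simple sums. A sketch of that arithmetic:

    # Ten building instances, each holding the same placement allocation.
    allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 10
    reserved_ram_mb = 512  # matches MEMORY_MB 'reserved' in the inventory

    used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
    used_disk_gb = sum(a["DISK_GB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)

    assert (used_ram_mb, used_disk_gb, used_vcpus) == (1792, 10, 10)
    print("used_ram=%dMB used_disk=%dGB used_vcpus=%d"
          % (used_ram_mb, used_disk_gb, used_vcpus))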
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 911.068688] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 911.068852] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '29', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_cf705f506bcc4409881416d80a745afc': '1', 'io_workload': '10', 'num_task_spawning': '1', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 911.408092] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe01faa9-d76b-4a16-839f-91d88fd10f7e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.416261] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e72a55-105f-4b80-ae42-b09f82a63ee1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.446799] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4007a56-98c9-408b-81ec-57921a8375fc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.454890] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b325d219-8f20-4854-95fe-edbe571588e7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.469034] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.477300] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.494918] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 
None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 911.495139] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.683s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.496358] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.496580] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 933.080795] env[62730]: WARNING oslo_vmware.rw_handles [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 933.080795] env[62730]: ERROR oslo_vmware.rw_handles [ 933.081363] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 933.083708] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 933.083976] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Copying Virtual Disk [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/67b4a97f-8072-4dbf-9848-3b45c2996205/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 933.084428] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63eab5a6-8e44-41cd-858a-55cf6f191402 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.095694] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 933.095694] env[62730]: value = "task-4837117" [ 933.095694] env[62730]: _type = "Task" [ 933.095694] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.104347] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.606663] env[62730]: DEBUG oslo_vmware.exceptions [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Fault InvalidArgument not matched. 
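"Fault InvalidArgument not matched" means the fault translator found no dedicated exception class registered for that fault name and fell back to the generic VimFaultException, which the traceback below then surfaces. A self-contained mock of that lookup-with-fallback; the registry and classes here are hypothetical stand-ins, not oslo.vmware's actual tables:

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FileNotFoundFault(VimFaultException):
        pass

    # Hypothetical registry: no entry for 'InvalidArgument'.
    _FAULT_CLASSES = {"FileNotFound": FileNotFoundFault}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            print("Fault %s not matched." % fault_name)
            cls = VimFaultException  # generic fallback, as in the log
        return cls([fault_name], message)

    exc = translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")
    print(type(exc).__name__, exc, exc.fault_list)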
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 933.606910] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.607481] env[62730]: ERROR nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 933.607481] env[62730]: Faults: ['InvalidArgument'] [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Traceback (most recent call last): [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] yield resources [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self.driver.spawn(context, instance, image_meta, [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self._fetch_image_if_missing(context, vi) [ 933.607481] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] image_cache(vi, tmp_image_ds_loc) [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] vm_util.copy_virtual_disk( [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] session._wait_for_task(vmdk_copy_task) [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return self.wait_for_task(task_ref) [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return evt.wait() [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] result = hub.switch() [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 933.607764] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return self.greenlet.switch() [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self.f(*self.args, **self.kw) [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] raise exceptions.translate_fault(task_info.error) [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Faults: ['InvalidArgument'] [ 933.608097] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] [ 933.608097] env[62730]: INFO nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Terminating instance [ 933.609776] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.609776] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.609776] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bec5a21c-4a25-490d-8dbb-38fa474942ae {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.611949] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 933.612183] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 933.612892] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68e0740-78b3-4099-87b0-cc04a1611cd5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.619982] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 933.620239] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4298169-b987-4696-9ad8-b19742873890 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.622453] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.622630] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 933.623648] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e36e96f8-aa16-4de8-a053-aa9d29da226e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.628343] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 933.628343] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52eb3bd9-55d3-1508-0825-5abca81cdb8e" [ 933.628343] env[62730]: _type = "Task" [ 933.628343] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.635912] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52eb3bd9-55d3-1508-0825-5abca81cdb8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.694056] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 933.694301] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 933.694487] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleting the datastore file [datastore2] d8ac549d-b27c-4d4a-a58b-de65bb5586f3 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.694836] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09a79fcc-0dc7-424d-b5d6-1a18271243a3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.701355] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for the task: (returnval){ [ 933.701355] env[62730]: value = "task-4837119" [ 933.701355] env[62730]: _type = "Task" [ 933.701355] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.709731] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.141100] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 934.141100] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating directory with path [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.141100] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28412b91-53fe-4ef6-80e8-7cdbc8f9a569 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.153666] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created directory with path [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.153666] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Fetch image to [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 934.153813] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 934.154667] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed71b707-7b00-4260-8bdd-cf892a7e67d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.161707] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09824e6a-3836-4504-8671-3099ac0b71d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.171052] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7957452-55b2-4a68-96a9-52120ecc66d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.205570] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7c19947a-735b-4041-9013-ab80f3cdc55d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.214475] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-238f5187-03b8-4490-835f-7dd75632284f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.216278] env[62730]: DEBUG oslo_vmware.api [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Task: {'id': task-4837119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087414} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.216514] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.216699] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 934.216873] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 934.217066] env[62730]: INFO nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Took 0.60 seconds to destroy the instance on the hypervisor. 
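The DeleteDatastoreFile_Task exchange above is oslo.vmware's standard invoke-then-poll pattern: the service proxy issues the SOAP call, gets back a task reference, and wait_for_task() polls it to a terminal state, which is what produces the "progress is 0%" and "completed successfully" entries. A minimal sketch of that pattern using the public oslo.vmware API; the endpoint, credentials, and datastore path below are placeholders, not values from this log:

from oslo_vmware import api

# Hypothetical endpoint and credentials; VMwareAPISession logs in on
# construction.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# invoke_api() sends the SOAP request (here FileManager.DeleteDatastoreFile_Task)
# and returns a task moref; wait_for_task() polls it and raises a translated
# fault if the task errors out.
dc_ref = None  # placeholder: the datacenter moref would be looked up first
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          session.vim.service_content.fileManager,
                          name='[datastore2] <instance-uuid>',  # placeholder path
                          datacenter=dc_ref)
session.wait_for_task(task)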
[ 934.219189] env[62730]: DEBUG nova.compute.claims [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 934.219366] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.219580] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.252247] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 934.319576] env[62730]: DEBUG oslo_vmware.rw_handles [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 934.381684] env[62730]: DEBUG oslo_vmware.rw_handles [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 934.381908] env[62730]: DEBUG oslo_vmware.rw_handles [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 934.682637] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e383e9-c2db-4bc9-8f89-5174cab7d4f7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.691209] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8d3d22-80df-420c-87f7-0061b07fd0f4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.722296] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6e7dcf-7f37-4ebe-b659-bc20414e0baa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.730286] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc93738-fab4-45a5-938c-b7eb9fb6466f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.743873] env[62730]: DEBUG nova.compute.provider_tree [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.754421] env[62730]: DEBUG nova.scheduler.client.report [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.770119] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.550s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.770673] env[62730]: ERROR nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 934.770673] env[62730]: Faults: ['InvalidArgument'] [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Traceback (most recent call last): [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: 
d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self.driver.spawn(context, instance, image_meta, [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self._fetch_image_if_missing(context, vi) [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] image_cache(vi, tmp_image_ds_loc) [ 934.770673] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] vm_util.copy_virtual_disk( [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] session._wait_for_task(vmdk_copy_task) [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return self.wait_for_task(task_ref) [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return evt.wait() [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] result = hub.switch() [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] return self.greenlet.switch() [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 934.771034] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] self.f(*self.args, **self.kw) [ 934.771309] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 934.771309] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] raise exceptions.translate_fault(task_info.error) [ 934.771309] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 934.771309] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Faults: ['InvalidArgument'] [ 934.771309] env[62730]: ERROR nova.compute.manager [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] [ 934.771419] env[62730]: DEBUG nova.compute.utils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 934.772970] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Build of instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 was re-scheduled: A specified parameter was not correct: fileType [ 934.772970] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 934.773432] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 934.773624] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 934.773796] env[62730]: DEBUG nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 934.773961] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 935.537516] env[62730]: DEBUG nova.network.neutron [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.550839] env[62730]: INFO nova.compute.manager [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Took 0.78 seconds to deallocate network for instance. [ 935.687336] env[62730]: INFO nova.scheduler.client.report [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Deleted allocations for instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 [ 935.718221] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c0d12200-89d1-42a2-b47d-0a72d590b89a tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 388.782s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.720639] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 189.765s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.720639] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Acquiring lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.720639] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.720639] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.722677] env[62730]: INFO nova.compute.manager [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Terminating instance [ 935.724764] env[62730]: DEBUG nova.compute.manager [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 935.724990] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 935.725486] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fac1e24-0545-4944-a754-2aa253f14f50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.735090] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6f93a5-3b79-4b5d-9c83-a89f3edb992a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.746990] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 935.769425] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8ac549d-b27c-4d4a-a58b-de65bb5586f3 could not be found. [ 935.769860] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 935.769860] env[62730]: INFO nova.compute.manager [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Took 0.04 seconds to destroy the instance on the hypervisor. 
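The WARNING just above explains why this second destroy is nearly instant: the rescheduled build had already unregistered the VM and deleted its datastore files, so terminate finds nothing on the backend and falls through to network and database cleanup. A hedged sketch of that tolerance pattern; this is an illustration of the behavior, not Nova's literal code:

from nova import exception

def destroy_on_hypervisor(vmops, context, instance):
    # Illustrative wrapper: a VM that is already gone counts as destroyed,
    # so termination can continue with deallocation and record cleanup.
    try:
        vmops.destroy(context, instance)
    except exception.InstanceNotFound:
        # Corresponds to the "Instance does not exist on backend" warning.
        pass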
[ 935.770216] env[62730]: DEBUG oslo.service.loopingcall [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.770346] env[62730]: DEBUG nova.compute.manager [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 935.770762] env[62730]: DEBUG nova.network.neutron [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 935.801773] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.802044] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.803552] env[62730]: INFO nova.compute.claims [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.806525] env[62730]: DEBUG nova.network.neutron [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.816877] env[62730]: INFO nova.compute.manager [-] [instance: d8ac549d-b27c-4d4a-a58b-de65bb5586f3] Took 0.05 seconds to deallocate network for instance. 
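The instance_claim here and the abort_instance_claim earlier serialize on the same "compute_resources" lock, and the waited/held durations in these lines are emitted by oslo.concurrency's wrapper around the decorated function. A minimal sketch of that decorator pattern; the prefix matches Nova's convention but the claim body is illustrative:

from oslo_concurrency import lockutils

# Nova builds its synchronized decorator with a 'nova-' lock-file prefix.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(tracker, instance):
    # Runs with the "compute_resources" lock held, so claims and aborts
    # cannot interleave while the tracked inventory is being updated.
    tracker['claims'] = tracker.get('claims', 0) + 1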
[ 935.929080] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f7167b59-dc09-4a7d-9678-9e36e8a684c1 tempest-ServersAdminTestJSON-491589472 tempest-ServersAdminTestJSON-491589472-project-member] Lock "d8ac549d-b27c-4d4a-a58b-de65bb5586f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.208722] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97e6fb8-af46-4a68-b53a-242856cbf830 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.216810] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65618c1c-2ae8-4861-89ed-6e9264eda9b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.248102] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76ff859-a7d4-4292-8b97-6fb7bd5bb9d8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.256585] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceeabf31-1875-4500-98b8-e9660e3a8de3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.270674] env[62730]: DEBUG nova.compute.provider_tree [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.280762] env[62730]: DEBUG nova.scheduler.client.report [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.297413] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.495s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.297911] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 936.333395] env[62730]: DEBUG nova.compute.utils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 936.335157] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 936.335157] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 936.346454] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 936.429639] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 936.456243] env[62730]: DEBUG nova.policy [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cd5284131a047c5826c253495b16a0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d775e3135484ed8b81c9d2991f2bedb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 936.459747] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.459992] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.460176] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.460363] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.460522] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.460673] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.460987] env[62730]: DEBUG nova.virt.hardware [None 
req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.461121] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.461321] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.461501] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.461679] env[62730]: DEBUG nova.virt.hardware [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.462877] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c838e84-8580-46ba-9a1c-8a7eac81fc24 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.472152] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82c321b-d474-430d-b53c-47aa61112200 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.177022] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Successfully created port: d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.752432] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Successfully updated port: d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.770535] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.770679] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 
tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.770830] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 938.861824] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.105810] env[62730]: DEBUG nova.compute.manager [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Received event network-vif-plugged-d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 939.106129] env[62730]: DEBUG oslo_concurrency.lockutils [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] Acquiring lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.106421] env[62730]: DEBUG oslo_concurrency.lockutils [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.106604] env[62730]: DEBUG oslo_concurrency.lockutils [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.106776] env[62730]: DEBUG nova.compute.manager [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] No waiting events found dispatching network-vif-plugged-d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.106943] env[62730]: WARNING nova.compute.manager [req-55377680-371f-4090-93e0-e843296eb415 req-320b0bc5-7f1b-4e36-afcf-1c411af9ea4b service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Received unexpected event network-vif-plugged-d86e506b-abce-444e-8e34-88abe5d8954f for instance with vm_state building and task_state spawning. 
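The network-vif-plugged sequence above is Neutron notifying Nova through the external-events API: the event is keyed by event name plus port tag and matched against waiters under the per-instance "-events" lock. Because this spawn had not registered a waiter yet, dispatch finds nothing and the event is logged as unexpected and dropped. A rough sketch of the keying, reusing the identifiers from the log; register_all() is how Nova services make the versioned object classes importable:

from nova import objects

objects.register_all()

event = objects.InstanceExternalEvent(
    instance_uuid='22f72732-e5e2-49dc-810a-ab90d7a367a0',
    name='network-vif-plugged',
    tag='d86e506b-abce-444e-8e34-88abe5d8954f',  # the port id seen above
    status='completed')

# Dispatch matches on "<name>-<tag>"; a miss produces the "No waiting events
# found" and "Received unexpected event" lines.
key = '%s-%s' % (event.name, event.tag)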
[ 939.185590] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Updating instance_info_cache with network_info: [{"id": "d86e506b-abce-444e-8e34-88abe5d8954f", "address": "fa:16:3e:e8:e0:1b", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86e506b-ab", "ovs_interfaceid": "d86e506b-abce-444e-8e34-88abe5d8954f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.201658] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.202016] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance network_info: |[{"id": "d86e506b-abce-444e-8e34-88abe5d8954f", "address": "fa:16:3e:e8:e0:1b", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86e506b-ab", "ovs_interfaceid": "d86e506b-abce-444e-8e34-88abe5d8954f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 939.202684] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None 
req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:e0:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c13fd8bc-e797-42fe-94ed-6370d3467a7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd86e506b-abce-444e-8e34-88abe5d8954f', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.212058] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating folder: Project (7d775e3135484ed8b81c9d2991f2bedb). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 939.212677] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61bb90aa-a07f-4266-a97d-69637067f914 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.226636] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created folder: Project (7d775e3135484ed8b81c9d2991f2bedb) in parent group-v942928. [ 939.226853] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating folder: Instances. Parent ref: group-v942974. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 939.227130] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c679a94f-1962-4ec7-8ac0-bb3cf0110b00 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.236691] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created folder: Instances in parent group-v942974. [ 939.237012] env[62730]: DEBUG oslo.service.loopingcall [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.237161] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 939.237371] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-236ec586-5603-4962-b7a3-590ce1d91f13 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.258144] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.258144] env[62730]: value = "task-4837122" [ 939.258144] env[62730]: _type = "Task" [ 939.258144] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.267553] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837122, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.768528] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837122, 'name': CreateVM_Task, 'duration_secs': 0.389927} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.768786] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 939.769410] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.769581] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.770015] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.772026] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b70e9f79-82db-48bc-b9e2-774919bb264a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.776624] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 939.776624] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525ffafc-3b84-1191-c82a-0f5f900ff167" [ 939.776624] env[62730]: _type = "Task" [ 939.776624] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.789223] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525ffafc-3b84-1191-c82a-0f5f900ff167, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.291019] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.291349] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.291579] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.129139] env[62730]: DEBUG nova.compute.manager [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Received event network-changed-d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 941.129378] env[62730]: DEBUG nova.compute.manager [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Refreshing instance network info cache due to event network-changed-d86e506b-abce-444e-8e34-88abe5d8954f. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 941.129506] env[62730]: DEBUG oslo_concurrency.lockutils [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] Acquiring lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.129603] env[62730]: DEBUG oslo_concurrency.lockutils [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] Acquired lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.129768] env[62730]: DEBUG nova.network.neutron [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Refreshing network info cache for port d86e506b-abce-444e-8e34-88abe5d8954f {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 941.881628] env[62730]: DEBUG nova.network.neutron [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Updated VIF entry in instance network info cache for port d86e506b-abce-444e-8e34-88abe5d8954f. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 941.882009] env[62730]: DEBUG nova.network.neutron [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Updating instance_info_cache with network_info: [{"id": "d86e506b-abce-444e-8e34-88abe5d8954f", "address": "fa:16:3e:e8:e0:1b", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd86e506b-ab", "ovs_interfaceid": "d86e506b-abce-444e-8e34-88abe5d8954f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.898111] env[62730]: DEBUG oslo_concurrency.lockutils [req-9cc03f39-854b-4b09-a787-88f60090a2f8 req-ae4bb9b5-7149-4c6b-a33d-3342e3d8172e service nova] Releasing lock "refresh_cache-22f72732-e5e2-49dc-810a-ab90d7a367a0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.790977] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.625523] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.625523] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.702470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] 
Acquiring lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.735058] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.737171] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.738603] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.738904] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.739028] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.715330] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.715330] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.733568] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.737341] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.737506] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 970.737622] env[62730]: 
DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 970.767711] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.767866] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.767996] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768354] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768494] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768621] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768739] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768853] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.768966] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.769089] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 970.769203] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 970.769737] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.769945] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.784107] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.784325] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.784491] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.785308] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 970.785810] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111d9526-98b7-45d6-8943-6d7dda7c3737 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.797098] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a74cbdd-cb2c-437e-b6a1-2b32175be84d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.813352] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cef1ed0-2948-4b4f-9d5c-6f152cbae0a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.820878] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36da4db-4cd1-47ad-b6ef-72db29a7be3b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.856543] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180493MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 970.856596] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.856820] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.960584] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.960767] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d90fd82e-a469-41c7-b414-c7eb5554e72a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.960898] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961035] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961157] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961324] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961508] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961676] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.961909] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.962082] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 970.976193] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 13fb51cc-7cfc-44f9-9a15-381762007fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 970.993183] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cced1efc-f73f-43a5-8a13-de65ef5703b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.011166] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.024326] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 73339a8b-3cb0-40b6-a467-e78f58902876 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.042032] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f945b0e5-e0a5-493f-8fe7-7b3000b1e97b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.056406] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 76ff81c2-500f-4727-9d98-45b57f70eb3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.069413] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c6e1b57d-7dcc-4703-b7f6-e747c7a89204 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.083669] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 0d3756a1-0483-44ae-9790-11627a5b6e02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.101038] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c104be3c-0108-468a-b99c-f0a3955d4c7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.115746] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9852654d-352a-4f6a-81b1-48d4399690e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.129172] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6868e76a-17a5-41d5-81bb-e83747586ffc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.143153] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ec9d765b-adb7-428c-9ab7-4a4cd90baa44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.157194] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.173895] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f0be97b5-35e3-4c67-96f6-c604a71c38b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.192067] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6ab13a84-4fcf-451a-a8d7-79ec54af27da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.204047] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.217995] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 971.218270] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 971.218434] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '30', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_7e26a6097b8c4bf3b6d4b77656087f8c': '2', 'io_workload': '10', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 971.692065] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5840868-6a78-46ae-929a-1780de5dd49a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.700328] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab695cd-c18b-42be-a0b2-194461d3c443 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.732506] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea8e6aa-019a-429f-9c39-217b0a6e29f0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.740990] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b5dfba-221d-4ff5-ac4a-be7b0b1b9f3d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.755294] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.766452] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 971.781178] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 971.781383] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.925s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.403282] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c66a379c-202b-47c2-b203-26231f26fb35 tempest-ServersV294TestFqdnHostnames-1731655541 tempest-ServersV294TestFqdnHostnames-1731655541-project-member] Acquiring lock "77b49a77-2048-4812-93bc-aba06586d2a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.403509] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c66a379c-202b-47c2-b203-26231f26fb35 tempest-ServersV294TestFqdnHostnames-1731655541 tempest-ServersV294TestFqdnHostnames-1731655541-project-member] Lock "77b49a77-2048-4812-93bc-aba06586d2a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.475470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-96dbf19d-3fa6-426a-8698-ba570676fead tempest-ServerDiagnosticsTest-42425005 tempest-ServerDiagnosticsTest-42425005-project-member] Acquiring lock "8ab13896-dd97-47cc-8013-9fe9dc791ef6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.475789] env[62730]: DEBUG oslo_concurrency.lockutils [None req-96dbf19d-3fa6-426a-8698-ba570676fead tempest-ServerDiagnosticsTest-42425005 tempest-ServerDiagnosticsTest-42425005-project-member] Lock "8ab13896-dd97-47cc-8013-9fe9dc791ef6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.749076] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 975.749328] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 981.748809] env[62730]: WARNING oslo_vmware.rw_handles [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 981.748809] env[62730]: ERROR oslo_vmware.rw_handles [ 981.749488] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 981.751076] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 981.751318] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Copying Virtual Disk [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/5d4595aa-3ed9-43c1-8d4f-34974f895276/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 981.751598] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13b474ea-4c47-4873-b85d-c303879e6aca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.760208] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the 
task: (returnval){ [ 981.760208] env[62730]: value = "task-4837123" [ 981.760208] env[62730]: _type = "Task" [ 981.760208] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.768606] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.271313] env[62730]: DEBUG oslo_vmware.exceptions [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 982.271568] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.272235] env[62730]: ERROR nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 982.272235] env[62730]: Faults: ['InvalidArgument'] [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Traceback (most recent call last): [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] yield resources [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self.driver.spawn(context, instance, image_meta, [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self._fetch_image_if_missing(context, vi) [ 982.272235] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 
2ed97ed9-4e81-484c-9f0e-baa6968b58a4] image_cache(vi, tmp_image_ds_loc) [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] vm_util.copy_virtual_disk( [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] session._wait_for_task(vmdk_copy_task) [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return self.wait_for_task(task_ref) [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return evt.wait() [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] result = hub.switch() [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 982.272750] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return self.greenlet.switch() [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self.f(*self.args, **self.kw) [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] raise exceptions.translate_fault(task_info.error) [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Faults: ['InvalidArgument'] [ 982.273109] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] [ 982.273109] env[62730]: INFO nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Terminating instance [ 982.274261] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 
tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.274527] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.274793] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7858c607-14e4-4b7e-a621-09a144e59208 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.277121] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 982.277363] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 982.278131] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9550321-9744-45b1-9698-786cfcefeb98 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.286071] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 982.286071] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b10af0c-c008-4486-81fb-cf4c3364ad1a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.288401] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.288576] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 982.289577] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccceeb95-c33a-4f62-b38b-2bf6df16f432 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.294939] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 982.294939] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5222cdd0-4f8b-b2c3-e339-5123d276c800" [ 982.294939] env[62730]: _type = "Task" [ 982.294939] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.302921] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5222cdd0-4f8b-b2c3-e339-5123d276c800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.363430] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 982.363723] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 982.363914] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleting the datastore file [datastore2] 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.364286] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6eb8f2a9-7923-4b93-ad53-ade5dd61ab60 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.371939] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 982.371939] env[62730]: value = "task-4837125" [ 982.371939] env[62730]: _type = "Task" [ 982.371939] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.380842] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.806025] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 982.806603] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating directory with path [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.806603] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b53d02f8-4ed9-4de5-9a25-85bf3ce556d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.819469] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Created directory with path [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.819715] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Fetch image to [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 982.819903] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 982.820808] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a62a1af-df91-48f6-b9ce-cc0f7b4b3bfe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.828671] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113941f6-7642-4ef6-89d9-56034cc92c79 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.838518] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89a1600-c9b5-4450-9d22-2d06bd9c3cd2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.871102] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d613d5fb-5c16-430a-992f-4fe7684e3f06 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.883283] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d345307d-3010-4202-afb1-8d2090fe7abf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.885175] env[62730]: DEBUG oslo_vmware.api [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07811} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.885422] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.885606] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 982.885784] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 982.885958] env[62730]: INFO nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 982.888577] env[62730]: DEBUG nova.compute.claims [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 982.888762] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.889027] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.912512] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 982.991539] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 983.052312] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 983.052541] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 983.373329] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8921cd26-3032-4fb3-8bb0-1f549cf41dbc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.381494] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcc0d46-6dd9-4f18-80ab-3638e81922fc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.415842] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b72fbbe-d384-4bc7-81e6-56d24ced5e07 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.424292] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5019853a-f944-4ac4-86c6-b621bc11181e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.438828] env[62730]: DEBUG nova.compute.provider_tree [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.448219] env[62730]: DEBUG nova.scheduler.client.report [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.463136] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.574s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.463754] env[62730]: ERROR nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 983.463754] env[62730]: Faults: ['InvalidArgument'] [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Traceback (most recent call last): [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 983.463754] env[62730]: 
ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self.driver.spawn(context, instance, image_meta, [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self._fetch_image_if_missing(context, vi) [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] image_cache(vi, tmp_image_ds_loc) [ 983.463754] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] vm_util.copy_virtual_disk( [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] session._wait_for_task(vmdk_copy_task) [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return self.wait_for_task(task_ref) [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return evt.wait() [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] result = hub.switch() [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] return self.greenlet.switch() [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 983.464094] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] self.f(*self.args, **self.kw) [ 983.464389] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 983.464389] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] raise exceptions.translate_fault(task_info.error) [ 983.464389] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 983.464389] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Faults: ['InvalidArgument'] [ 983.464389] env[62730]: ERROR nova.compute.manager [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] [ 983.464834] env[62730]: DEBUG nova.compute.utils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 983.467351] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Build of instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 was re-scheduled: A specified parameter was not correct: fileType [ 983.467351] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 983.467765] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 983.467995] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 983.468222] env[62730]: DEBUG nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 983.468473] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 984.096044] env[62730]: DEBUG nova.network.neutron [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.107711] env[62730]: INFO nova.compute.manager [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Took 0.64 seconds to deallocate network for instance. [ 984.221552] env[62730]: INFO nova.scheduler.client.report [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleted allocations for instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 [ 984.244403] env[62730]: DEBUG oslo_concurrency.lockutils [None req-98a29859-bdb2-4457-ae9a-2236aa42836e tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 428.659s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.245666] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 27.543s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.245894] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.246113] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.246288] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.248411] env[62730]: INFO nova.compute.manager [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Terminating instance [ 984.250863] env[62730]: DEBUG nova.compute.manager [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 984.251133] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 984.251712] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bd68260-0151-4d36-a834-b3a9ccfb2019 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.258266] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 8504a95d-6003-4698-a3b5-4913eb59c932] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 984.270025] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd0db82-c4ea-4ff6-b7c5-54e4f15355e1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.285460] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 8504a95d-6003-4698-a3b5-4913eb59c932] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 984.303498] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2ed97ed9-4e81-484c-9f0e-baa6968b58a4 could not be found.
[ 984.303736] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 984.303934] env[62730]: INFO nova.compute.manager [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 984.304202] env[62730]: DEBUG oslo.service.loopingcall [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.306585] env[62730]: DEBUG nova.compute.manager [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 984.306662] env[62730]: DEBUG nova.network.neutron [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 984.316510] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "8504a95d-6003-4698-a3b5-4913eb59c932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.711s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.329544] env[62730]: DEBUG nova.compute.manager [None req-45b3bb65-63ea-478a-b5b7-9b7a570f82ca tempest-ServersListShow296Test-183377682 tempest-ServersListShow296Test-183377682-project-member] [instance: 13fb51cc-7cfc-44f9-9a15-381762007fe7] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 984.335525] env[62730]: DEBUG nova.network.neutron [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.346079] env[62730]: INFO nova.compute.manager [-] [instance: 2ed97ed9-4e81-484c-9f0e-baa6968b58a4] Took 0.04 seconds to deallocate network for instance. [ 984.366075] env[62730]: DEBUG nova.compute.manager [None req-45b3bb65-63ea-478a-b5b7-9b7a570f82ca tempest-ServersListShow296Test-183377682 tempest-ServersListShow296Test-183377682-project-member] [instance: 13fb51cc-7cfc-44f9-9a15-381762007fe7] Instance disappeared before build.
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 984.389193] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45b3bb65-63ea-478a-b5b7-9b7a570f82ca tempest-ServersListShow296Test-183377682 tempest-ServersListShow296Test-183377682-project-member] Lock "13fb51cc-7cfc-44f9-9a15-381762007fe7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.042s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.401617] env[62730]: DEBUG nova.compute.manager [None req-e2dbacb2-8582-48c6-a71e-4c0fdd5d7ff2 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: cced1efc-f73f-43a5-8a13-de65ef5703b4] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 984.428504] env[62730]: DEBUG nova.compute.manager [None req-e2dbacb2-8582-48c6-a71e-4c0fdd5d7ff2 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: cced1efc-f73f-43a5-8a13-de65ef5703b4] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 984.450463] env[62730]: DEBUG oslo_concurrency.lockutils [None req-45ec9f9d-9ec0-46e2-a2c4-130cb0d60b8b tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "2ed97ed9-4e81-484c-9f0e-baa6968b58a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.205s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.456529] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2dbacb2-8582-48c6-a71e-4c0fdd5d7ff2 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "cced1efc-f73f-43a5-8a13-de65ef5703b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 198.883s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.466048] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Starting instance...
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 984.520402] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.520700] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.522216] env[62730]: INFO nova.compute.claims [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.931995] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1282596-5dca-4b8c-b36a-1c725789f3bd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.940620] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd1d4b9-a158-40c3-a0bb-dabe1d726181 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.971721] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bb2aca-b14e-4a89-a1fa-87dd8a03af8e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.978498] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e995aa2-8e1c-4a72-9516-9a28e9097f05 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "699d4cca-99b8-4517-957b-949afe791aed" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.978770] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e995aa2-8e1c-4a72-9516-9a28e9097f05 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "699d4cca-99b8-4517-957b-949afe791aed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.984253] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c903cf6a-6439-4dd8-9510-657718bdb991 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.999749] env[62730]: DEBUG nova.compute.provider_tree [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.011128] env[62730]: DEBUG nova.scheduler.client.report [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 985.025898] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.505s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.026435] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 985.061501] env[62730]: DEBUG nova.compute.utils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 985.062806] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 985.066018] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 985.074354] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 985.162050] env[62730]: DEBUG nova.policy [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e47ee02b564e809516edbb7c267817', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47edc70d81cc4ea68d8da7bec4c625d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 985.188832] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 985.215411] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=<?>,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-26T09:07:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 985.215679] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 985.215842] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.216039] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 985.216193] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.216346] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member]
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 985.216573] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 985.216738] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 985.216904] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 985.217081] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 985.217259] env[62730]: DEBUG nova.virt.hardware [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 985.218180] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10aec129-2cc3-452b-981f-84a50fc68703 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.227234] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ca7f56-2e72-41dd-a076-4b4c635cbbb8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.707536] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Successfully created port: cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.078811] env[62730]: DEBUG nova.compute.manager [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Received event network-vif-plugged-cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 987.079118] env[62730]: DEBUG oslo_concurrency.lockutils [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] Acquiring lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.079309] env[62730]: DEBUG
oslo_concurrency.lockutils [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] Lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.079503] env[62730]: DEBUG oslo_concurrency.lockutils [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] Lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.079599] env[62730]: DEBUG nova.compute.manager [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] No waiting events found dispatching network-vif-plugged-cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 987.079801] env[62730]: WARNING nova.compute.manager [req-eda10544-0d2f-4201-92ae-f59a1b69131c req-9d7682a5-5506-4521-958c-7441234bd5f3 service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Received unexpected event network-vif-plugged-cada6b30-a2f5-4223-af4a-ae0f84edf179 for instance with vm_state building and task_state spawning. [ 987.151797] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Successfully updated port: cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 987.165947] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.166104] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.166277] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 987.261643] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance cache missing network info.
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 987.660494] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Updating instance_info_cache with network_info: [{"id": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "address": "fa:16:3e:f0:43:9b", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcada6b30-a2", "ovs_interfaceid": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.678755] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.678755] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance network_info: |[{"id": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "address": "fa:16:3e:f0:43:9b", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcada6b30-a2", "ovs_interfaceid": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 987.679492] env[62730]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:43:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cada6b30-a2f5-4223-af4a-ae0f84edf179', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.687632] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating folder: Project (47edc70d81cc4ea68d8da7bec4c625d0). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 987.688320] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e6cdf3d-41da-4266-9e79-7da58d14e230 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.699876] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created folder: Project (47edc70d81cc4ea68d8da7bec4c625d0) in parent group-v942928. [ 987.700179] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating folder: Instances. Parent ref: group-v942977. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 987.700486] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0583fec-706e-4d9b-a1a7-6fab4f1bd3ee {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.712729] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created folder: Instances in parent group-v942977. [ 987.712985] env[62730]: DEBUG oslo.service.loopingcall [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 987.714310] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 987.714310] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6cca803-2b25-4773-9eba-2e9fb3eb2226 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.737288] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.737288] env[62730]: value = "task-4837128" [ 987.737288] env[62730]: _type = "Task" [ 987.737288] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.746865] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837128, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.192789] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "01a34662-fef9-4855-ba3c-39184982fd0e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.253219] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837128, 'name': CreateVM_Task, 'duration_secs': 0.338291} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.253219] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 988.253477] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.253836] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.254732] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.255088] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f15aa0e0-51e9-4fb0-9c03-66e1e8e30a95 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.261426] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 988.261426] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d2cb0d-dca7-b0cb-2a14-ad135e953e7a" [ 988.261426] env[62730]: _type = "Task" [ 988.261426] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.274525] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d2cb0d-dca7-b0cb-2a14-ad135e953e7a, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.772406] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.772703] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.772922] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.149090] env[62730]: DEBUG nova.compute.manager [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Received event network-changed-cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 989.149296] env[62730]: DEBUG nova.compute.manager [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Refreshing instance network info cache due to event network-changed-cada6b30-a2f5-4223-af4a-ae0f84edf179. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 989.149513] env[62730]: DEBUG oslo_concurrency.lockutils [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] Acquiring lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.149657] env[62730]: DEBUG oslo_concurrency.lockutils [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] Acquired lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.149821] env[62730]: DEBUG nova.network.neutron [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Refreshing network info cache for port cada6b30-a2f5-4223-af4a-ae0f84edf179 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 989.681777] env[62730]: DEBUG nova.network.neutron [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Updated VIF entry in instance network info cache for port cada6b30-a2f5-4223-af4a-ae0f84edf179. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 989.681777] env[62730]: DEBUG nova.network.neutron [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Updating instance_info_cache with network_info: [{"id": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "address": "fa:16:3e:f0:43:9b", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcada6b30-a2", "ovs_interfaceid": "cada6b30-a2f5-4223-af4a-ae0f84edf179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.691592] env[62730]: DEBUG oslo_concurrency.lockutils [req-06c467df-e901-4404-874a-f7735e15a4f0 req-04c79cc9-b74a-48a7-85a1-14a9b26d6b2b service nova] Releasing lock "refresh_cache-01a34662-fef9-4855-ba3c-39184982fd0e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.747223] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "435af367-8af8-4e07-b96a-923d32cc645e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.747621] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.877493] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5b2d8d30-2b7e-41f0-a7d8-0d91f5e1e473 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "7f22463d-9e8c-4d5b-b30e-86654f34b633" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.877873] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5b2d8d30-2b7e-41f0-a7d8-0d91f5e1e473 tempest-SecurityGroupsTestJSON-844234654
tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "7f22463d-9e8c-4d5b-b30e-86654f34b633" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.184525] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e1beb23e-4933-4a59-83fe-c27c4be1767c tempest-ServerShowV254Test-1052167189 tempest-ServerShowV254Test-1052167189-project-member] Acquiring lock "8f51fc3b-205b-41cb-bc95-1f0e694dda76" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.184823] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e1beb23e-4933-4a59-83fe-c27c4be1767c tempest-ServerShowV254Test-1052167189 tempest-ServerShowV254Test-1052167189-project-member] Lock "8f51fc3b-205b-41cb-bc95-1f0e694dda76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.690676] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6485cd2a-b3a3-46ec-a981-0cd7879ffc1d tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "ffe28344-6909-4252-b899-4a2d66b1d6df" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.690979] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6485cd2a-b3a3-46ec-a981-0cd7879ffc1d tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "ffe28344-6909-4252-b899-4a2d66b1d6df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.653132] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8a8bbdd7-53ce-4fcd-abeb-b7a5757510f8 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Acquiring lock "234808e0-4e10-4209-96c0-fa61fe2cdbe3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.653395] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8a8bbdd7-53ce-4fcd-abeb-b7a5757510f8 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock "234808e0-4e10-4209-96c0-fa61fe2cdbe3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.432030] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1e738207-f61d-4568-b768-7d5014c26714 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "de6f4f4c-b07a-437e-b01b-e7a7b600fc25" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.432413] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1e738207-f61d-4568-b768-7d5014c26714 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "de6f4f4c-b07a-437e-b01b-e7a7b600fc25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.906026] env[62730]: DEBUG oslo_concurrency.lockutils [None req-901d04a0-d539-42bc-b91d-28859c8f67f3 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Acquiring lock "81f8a8a0-9897-424e-aaa7-02e902b996d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.906026] env[62730]: DEBUG oslo_concurrency.lockutils [None req-901d04a0-d539-42bc-b91d-28859c8f67f3 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "81f8a8a0-9897-424e-aaa7-02e902b996d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.738283] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.115402] env[62730]: WARNING oslo_vmware.rw_handles [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1028.115402] env[62730]: ERROR oslo_vmware.rw_handles [ 1028.115987] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member]
[instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1028.118310] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1028.118600] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Copying Virtual Disk [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/7f521c88-0dca-44b7-a888-9b005a610fbd/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1028.118925] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79b677f2-009a-4e96-9d92-01635ea9e437 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.127516] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 1028.127516] env[62730]: value = "task-4837129" [ 1028.127516] env[62730]: _type = "Task" [ 1028.127516] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.136231] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.638524] env[62730]: DEBUG oslo_vmware.exceptions [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1028.638682] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.639155] env[62730]: ERROR nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.639155] env[62730]: Faults: ['InvalidArgument'] [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Traceback (most recent call last): [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] yield resources [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self.driver.spawn(context, instance, image_meta, [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self._fetch_image_if_missing(context, vi) [ 1028.639155] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] image_cache(vi, tmp_image_ds_loc) [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] vm_util.copy_virtual_disk( [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] session._wait_for_task(vmdk_copy_task) [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return self.wait_for_task(task_ref) [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return evt.wait() [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] result = hub.switch() [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1028.639484] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return self.greenlet.switch() [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self.f(*self.args, **self.kw) [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] raise exceptions.translate_fault(task_info.error) [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Faults: ['InvalidArgument'] [ 1028.639796] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] [ 1028.639796] env[62730]: INFO nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Terminating instance [ 1028.641119] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.641339] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.641949] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 
tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1028.642192] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.642426] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8d922bc-4d51-4167-8f50-a4a2c2424b50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.644915] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097c8445-34bd-4570-93e0-d56664756067 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.652324] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1028.653374] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d93a10eb-d3bd-44c8-a781-7a4d540842c6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.654796] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.654976] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1028.655646] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c670b06-7059-4d62-8d26-cc5586074d53 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.660641] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for the task: (returnval){ [ 1028.660641] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]529c40ac-4d5f-4ab2-9dc2-43d93a66984f" [ 1028.660641] env[62730]: _type = "Task" [ 1028.660641] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.667798] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]529c40ac-4d5f-4ab2-9dc2-43d93a66984f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.724575] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1028.724831] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1028.725128] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleting the datastore file [datastore2] d90fd82e-a469-41c7-b414-c7eb5554e72a {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.725325] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf5c3794-1c32-4408-ae57-dd5d62751ca2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.734665] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for the task: (returnval){ [ 1028.734665] env[62730]: value = "task-4837131" [ 1028.734665] env[62730]: _type = "Task" [ 1028.734665] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.738489] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.738693] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.739362] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.743986] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.173501] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1029.173786] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Creating directory with path [datastore2] vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.174629] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-065b4a70-73a6-4d1f-9973-f8b8885cf776 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.187060] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Created directory with path [datastore2] vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.187197] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Fetch image to [datastore2] vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1029.187374] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] 
Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1029.188183] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2592caed-d19e-4595-86f8-60aec24ee63f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.196058] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f56751-ec5c-4ee6-acbf-3d6c693bf7ec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.205747] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76040293-9af7-49ca-947e-214b1f51a906 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.243868] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e45f724-6130-4e87-86d6-2099336bab2e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.253737] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7659902f-f649-4ba8-a221-e6bd15cb5e39 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.255701] env[62730]: DEBUG oslo_vmware.api [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Task: {'id': task-4837131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081972} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.255928] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.256130] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1029.256308] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1029.256498] env[62730]: INFO nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Took 0.61 seconds to destroy the instance on the hypervisor. 
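The two tracebacks above walk the same polling path: vm_util.copy_virtual_disk() starts a CopyVirtualDisk_Task and the session blocks in oslo_vmware's wait_for_task()/_poll_task() until vCenter reports a terminal state, at which point a server-side fault such as InvalidArgument is translated into a VimFaultException (api.py:448 in the frames above). The sketch below is a minimal illustration of that pattern, not oslo.vmware's implementation; get_task_info and the dict-shaped TaskInfo are assumptions, and the real loop runs under an eventlet looping call rather than time.sleep().

```python
import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def translate_fault(error):
    # Stand-in for oslo_vmware.exceptions.translate_fault(): wrap the
    # server-reported fault name and message, e.g.
    # VimFaultException(['InvalidArgument'],
    #                   'A specified parameter was not correct: fileType')
    return VimFaultException([error["fault"]], error["message"])

def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll a vCenter task reference until it succeeds or errors out."""
    while True:
        info = get_task_info(task_ref)  # TaskInfo: state, progress, error
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Terminal failure: surface the fault to the caller, which is
            # what propagates out of spawn() in the tracebacks above.
            raise translate_fault(info["error"])
        time.sleep(interval)            # queued/running: keep polling
```

Nothing in _cache_sparse_image() handles the fault, so it unwinds through spawn(), the compute manager tears the instance down, and the build is re-scheduled, which is the sequence the surrounding entries record.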
[ 1029.258977] env[62730]: DEBUG nova.compute.claims [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1029.259098] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.259481] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.279241] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1029.344829] env[62730]: DEBUG oslo_vmware.rw_handles [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1029.409200] env[62730]: DEBUG oslo_vmware.rw_handles [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1029.409711] env[62730]: DEBUG oslo_vmware.rw_handles [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1029.728210] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fd8cdc-3c18-4a6a-ab62-97a6fff41c75 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.736575] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e962a33-3a4f-4b2b-bc4c-f762585b7aaa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.770763] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50eaa86c-9fe2-4e1d-bb08-4777eb0d2e07 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.779208] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec71d6c2-d680-4f74-8c4d-3e3b10a95273 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.793912] env[62730]: DEBUG nova.compute.provider_tree [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.804754] env[62730]: DEBUG nova.scheduler.client.report [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.820057] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.561s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.820616] env[62730]: ERROR nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1029.820616] env[62730]: Faults: ['InvalidArgument'] [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Traceback (most recent call last): [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1029.820616] 
env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self.driver.spawn(context, instance, image_meta, [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self._fetch_image_if_missing(context, vi) [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] image_cache(vi, tmp_image_ds_loc) [ 1029.820616] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] vm_util.copy_virtual_disk( [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] session._wait_for_task(vmdk_copy_task) [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return self.wait_for_task(task_ref) [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return evt.wait() [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] result = hub.switch() [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] return self.greenlet.switch() [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1029.820933] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] self.f(*self.args, **self.kw) [ 1029.821248] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1029.821248] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] raise exceptions.translate_fault(task_info.error) [ 1029.821248] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1029.821248] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Faults: ['InvalidArgument'] [ 1029.821248] env[62730]: ERROR nova.compute.manager [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] [ 1029.821365] env[62730]: DEBUG nova.compute.utils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1029.822797] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Build of instance d90fd82e-a469-41c7-b414-c7eb5554e72a was re-scheduled: A specified parameter was not correct: fileType [ 1029.822797] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1029.823233] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1029.823419] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1029.823575] env[62730]: DEBUG nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1029.823744] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1030.353507] env[62730]: DEBUG nova.network.neutron [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.370730] env[62730]: INFO nova.compute.manager [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Took 0.55 seconds to deallocate network for instance. [ 1030.519148] env[62730]: INFO nova.scheduler.client.report [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Deleted allocations for instance d90fd82e-a469-41c7-b414-c7eb5554e72a [ 1030.553139] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2f74b784-7250-466e-8ca4-67ed0cb73375 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 471.629s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.554975] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 270.477s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.555933] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Acquiring lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.556042] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.556561] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.558781] env[62730]: INFO nova.compute.manager [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Terminating instance [ 1030.561823] env[62730]: DEBUG nova.compute.manager [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1030.562076] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1030.562379] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e48481d-e543-4872-b4a4-f0ab7ed50318 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.572014] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c1cba6-b069-472a-afdf-197e076d8faf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.591978] env[62730]: DEBUG nova.compute.manager [None req-89495b67-e092-4b3a-9647-ca4b5239cce4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 73339a8b-3cb0-40b6-a467-e78f58902876] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1030.607240] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d90fd82e-a469-41c7-b414-c7eb5554e72a could not be found. 
[ 1030.607240] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1030.607240] env[62730]: INFO nova.compute.manager [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1030.607240] env[62730]: DEBUG oslo.service.loopingcall [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.607508] env[62730]: DEBUG nova.compute.manager [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1030.607607] env[62730]: DEBUG nova.network.neutron [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1030.626210] env[62730]: DEBUG nova.compute.manager [None req-89495b67-e092-4b3a-9647-ca4b5239cce4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] [instance: 73339a8b-3cb0-40b6-a467-e78f58902876] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1030.652397] env[62730]: DEBUG oslo_concurrency.lockutils [None req-89495b67-e092-4b3a-9647-ca4b5239cce4 tempest-MigrationsAdminTest-1545434615 tempest-MigrationsAdminTest-1545434615-project-member] Lock "73339a8b-3cb0-40b6-a467-e78f58902876" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 237.539s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.665947] env[62730]: DEBUG nova.network.neutron [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.666784] env[62730]: DEBUG nova.compute.manager [None req-bcdb84af-6d73-487d-a028-95c2463e0aa3 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] [instance: f945b0e5-e0a5-493f-8fe7-7b3000b1e97b] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1030.674441] env[62730]: INFO nova.compute.manager [-] [instance: d90fd82e-a469-41c7-b414-c7eb5554e72a] Took 0.07 seconds to deallocate network for instance. [ 1030.713640] env[62730]: DEBUG nova.compute.manager [None req-bcdb84af-6d73-487d-a028-95c2463e0aa3 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] [instance: f945b0e5-e0a5-493f-8fe7-7b3000b1e97b] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1030.738358] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.739053] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.748690] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bcdb84af-6d73-487d-a028-95c2463e0aa3 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Lock "f945b0e5-e0a5-493f-8fe7-7b3000b1e97b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 237.448s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.751596] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.751967] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.752451] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.752521] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1030.754199] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a124ab72-ce2d-4f3f-806b-43a81543f26f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.765193] env[62730]: DEBUG nova.compute.manager [None req-4a5ac411-17de-49a8-82fe-fcac8203df52 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] [instance: 76ff81c2-500f-4727-9d98-45b57f70eb3a] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1030.770155] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1979fd-218e-4c07-b918-9fce57312735 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.794976] env[62730]: DEBUG nova.compute.manager [None req-4a5ac411-17de-49a8-82fe-fcac8203df52 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] [instance: 76ff81c2-500f-4727-9d98-45b57f70eb3a] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1030.796843] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22038a64-8628-4bf1-bfe4-121c642d7ea6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.800853] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f2508987-5e33-4860-ac16-b1fa16c7d329 tempest-ListImageFiltersTestJSON-1281919080 tempest-ListImageFiltersTestJSON-1281919080-project-member] Lock "d90fd82e-a469-41c7-b414-c7eb5554e72a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.246s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.812096] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e9cd5d-4b59-486d-9ab0-a872d24285c1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.853919] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180524MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1030.854128] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.854318] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.856850] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4a5ac411-17de-49a8-82fe-fcac8203df52 tempest-ServerRescueNegativeTestJSON-309598733 tempest-ServerRescueNegativeTestJSON-309598733-project-member] Lock "76ff81c2-500f-4727-9d98-45b57f70eb3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.667s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.867264] env[62730]: DEBUG nova.compute.manager [None req-9bb91aab-f736-4285-89b3-5a43c479f117 tempest-ServerActionsTestOtherA-895679453 tempest-ServerActionsTestOtherA-895679453-project-member] [instance: 
c6e1b57d-7dcc-4703-b7f6-e747c7a89204] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1030.937921] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938099] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938243] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938377] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938495] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938612] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938727] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938846] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.938962] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1030.940888] env[62730]: DEBUG nova.compute.manager [None req-9bb91aab-f736-4285-89b3-5a43c479f117 tempest-ServerActionsTestOtherA-895679453 tempest-ServerActionsTestOtherA-895679453-project-member] [instance: c6e1b57d-7dcc-4703-b7f6-e747c7a89204] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1030.950072] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1030.959713] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f0be97b5-35e3-4c67-96f6-c604a71c38b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1030.962674] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9bb91aab-f736-4285-89b3-5a43c479f117 tempest-ServerActionsTestOtherA-895679453 tempest-ServerActionsTestOtherA-895679453-project-member] Lock "c6e1b57d-7dcc-4703-b7f6-e747c7a89204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 233.878s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.972187] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6ab13a84-4fcf-451a-a8d7-79ec54af27da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1030.973716] env[62730]: DEBUG nova.compute.manager [None req-cfb26a7c-7f53-4d88-9581-12aa0a39ea67 tempest-ServerAddressesNegativeTestJSON-1349538835 tempest-ServerAddressesNegativeTestJSON-1349538835-project-member] [instance: 0d3756a1-0483-44ae-9790-11627a5b6e02] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1030.982681] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1030.992891] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.004411] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 77b49a77-2048-4812-93bc-aba06586d2a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.005986] env[62730]: DEBUG nova.compute.manager [None req-cfb26a7c-7f53-4d88-9581-12aa0a39ea67 tempest-ServerAddressesNegativeTestJSON-1349538835 tempest-ServerAddressesNegativeTestJSON-1349538835-project-member] [instance: 0d3756a1-0483-44ae-9790-11627a5b6e02] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.015828] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8ab13896-dd97-47cc-8013-9fe9dc791ef6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.028018] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 699d4cca-99b8-4517-957b-949afe791aed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
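
Annotation: the resource-tracker entries around this point show `_remove_deleted_instances_allocations` classifying placement allocations against this compute node: instances it actively manages keep their allocations, while instances that were scheduled here but have not started yet are skipped rather than healed (the run of "Skipping heal of allocation" messages continues below). A minimal sketch of that decision under invented names (`Allocation`, `classify`), not Nova's real data model:

    # Hypothetical, simplified classification of placement allocations,
    # mirroring the log messages above; not Nova's actual code.
    from dataclasses import dataclass

    @dataclass
    class Allocation:
        instance_uuid: str
        resources: dict   # e.g. {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}

    def classify(alloc, tracked_uuids, scheduled_uuids):
        """Decide what to do with one allocation against this compute node."""
        if alloc.instance_uuid in tracked_uuids:
            return "keep"    # actively managed here: allocation is expected
        if alloc.instance_uuid in scheduled_uuids:
            return "skip"    # scheduled but not started: don't heal it away
        return "delete"      # leftover allocation from a vanished instance

    alloc = Allocation("4a830a6a-d473-4ae4-858e-2330e42f8c9e",
                       {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1})
    print(classify(alloc, set(), {alloc.instance_uuid}))   # -> skip
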
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.033412] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cfb26a7c-7f53-4d88-9581-12aa0a39ea67 tempest-ServerAddressesNegativeTestJSON-1349538835 tempest-ServerAddressesNegativeTestJSON-1349538835-project-member] Lock "0d3756a1-0483-44ae-9790-11627a5b6e02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.635s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.040402] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.044950] env[62730]: DEBUG nova.compute.manager [None req-ac8876d3-bbbc-43c3-a804-209997d1e333 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 3d002de2-7477-4d62-861f-c2bb273b9b8b] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.052222] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7f22463d-9e8c-4d5b-b30e-86654f34b633 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.066561] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8f51fc3b-205b-41cb-bc95-1f0e694dda76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.073672] env[62730]: DEBUG nova.compute.manager [None req-ac8876d3-bbbc-43c3-a804-209997d1e333 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 3d002de2-7477-4d62-861f-c2bb273b9b8b] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.078521] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ffe28344-6909-4252-b899-4a2d66b1d6df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.089432] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 234808e0-4e10-4209-96c0-fa61fe2cdbe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.099556] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ac8876d3-bbbc-43c3-a804-209997d1e333 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "3d002de2-7477-4d62-861f-c2bb273b9b8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.293s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.101757] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance de6f4f4c-b07a-437e-b01b-e7a7b600fc25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.110799] env[62730]: DEBUG nova.compute.manager [None req-4afaf63c-361a-4ca7-bc1b-cd460e1f1549 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: c104be3c-0108-468a-b99c-f0a3955d4c7f] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.115157] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 81f8a8a0-9897-424e-aaa7-02e902b996d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1031.115473] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1031.115610] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '38', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '9', 'num_os_type_None': '9', 'num_proj_e090f6e3fd264211b21d6d8407d12cc7': '1', 'io_workload': '9', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1031.140689] env[62730]: DEBUG nova.compute.manager [None req-4afaf63c-361a-4ca7-bc1b-cd460e1f1549 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: c104be3c-0108-468a-b99c-f0a3955d4c7f] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.166707] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4afaf63c-361a-4ca7-bc1b-cd460e1f1549 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "c104be3c-0108-468a-b99c-f0a3955d4c7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.520s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.178029] env[62730]: DEBUG nova.compute.manager [None req-0b53a35a-7c1d-45c7-bb66-00e70b27aaf7 tempest-ServersAdminNegativeTestJSON-625958026 tempest-ServersAdminNegativeTestJSON-625958026-project-member] [instance: 9852654d-352a-4f6a-81b1-48d4399690e9] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.210964] env[62730]: DEBUG nova.compute.manager [None req-0b53a35a-7c1d-45c7-bb66-00e70b27aaf7 tempest-ServersAdminNegativeTestJSON-625958026 tempest-ServersAdminNegativeTestJSON-625958026-project-member] [instance: 9852654d-352a-4f6a-81b1-48d4399690e9] Instance disappeared before build. 
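
Annotation: the "Final resource view" entry above is an aggregation over the nine tracked instances, and the numbers check out: nine m1.nano instances at 128 MB each plus the 512 MB reservation give used_ram=1664MB, with used_vcpus=9 and used_disk=9GB. A toy reconstruction of that arithmetic (field names are illustrative, not Nova's):

    # Toy reconstruction of the final-resource-view arithmetic above.
    from collections import Counter

    # In the log each of the nine instances belongs to a different project;
    # one project is repeated here for brevity.
    instances = [{"project_id": "e090f6e3fd264211b21d6d8407d12cc7",
                  "memory_mb": 128, "vcpus": 1, "root_gb": 1}] * 9

    used_ram = 512 + sum(i["memory_mb"] for i in instances)  # reserved + flavors
    used_vcpus = sum(i["vcpus"] for i in instances)
    used_disk = sum(i["root_gb"] for i in instances)
    stats = Counter()
    for i in instances:
        stats["num_instances"] += 1
        stats["num_proj_%s" % i["project_id"]] += 1

    print(used_ram, used_vcpus, used_disk)   # 1664 9 9, matching the log
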
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.238875] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0b53a35a-7c1d-45c7-bb66-00e70b27aaf7 tempest-ServersAdminNegativeTestJSON-625958026 tempest-ServersAdminNegativeTestJSON-625958026-project-member] Lock "9852654d-352a-4f6a-81b1-48d4399690e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.726s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.256249] env[62730]: DEBUG nova.compute.manager [None req-5dae45e9-78e7-4681-84a1-db7bb9334920 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 6868e76a-17a5-41d5-81bb-e83747586ffc] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.283955] env[62730]: DEBUG nova.compute.manager [None req-5dae45e9-78e7-4681-84a1-db7bb9334920 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 6868e76a-17a5-41d5-81bb-e83747586ffc] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.315522] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5dae45e9-78e7-4681-84a1-db7bb9334920 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "6868e76a-17a5-41d5-81bb-e83747586ffc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.721s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.327673] env[62730]: DEBUG nova.compute.manager [None req-93fb1c9e-6820-4112-b858-4e9dbbe3522f tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] [instance: ec9d765b-adb7-428c-9ab7-4a4cd90baa44] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.359081] env[62730]: DEBUG nova.compute.manager [None req-93fb1c9e-6820-4112-b858-4e9dbbe3522f tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] [instance: ec9d765b-adb7-428c-9ab7-4a4cd90baa44] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1031.386114] env[62730]: DEBUG oslo_concurrency.lockutils [None req-93fb1c9e-6820-4112-b858-4e9dbbe3522f tempest-ServersTestMultiNic-379198723 tempest-ServersTestMultiNic-379198723-project-member] Lock "ec9d765b-adb7-428c-9ab7-4a4cd90baa44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.218s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.416237] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Starting instance... 
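
Annotation: each build in these entries runs under a lock named after the instance UUID (`_locked_do_build_and_run_instance`); when the instance was deleted while the request sat queued, the build exits with "Instance disappeared before build." and the lock is released after the long hold times recorded above. A sketch of that pattern, assuming oslo.concurrency is installed; the function bodies are invented:

    # Sketch of the per-instance build lock pattern, assuming oslo.concurrency
    # is available; everything else here is invented for illustration.
    from oslo_concurrency import lockutils

    def build_and_run_instance(instance_uuid, instance_exists):
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            # The whole build runs while holding the UUID-named lock.
            if not instance_exists():
                print("Instance disappeared before build.")
                return
            print("Starting instance...")

        _locked_do_build_and_run_instance()

    # A request whose instance was deleted while queued exits immediately:
    build_and_run_instance("c6e1b57d-7dcc-4703-b7f6-e747c7a89204", lambda: False)
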
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1031.484085] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.510783] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a945ecea-cc36-4e60-8055-6ee5cbe5409a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.519184] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986de524-89e8-491b-ad3f-4657336b0289 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.551143] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2483af6e-2f66-446e-bb41-ddde2d5f4705 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.559134] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8354a2-a1ce-414d-8d3a-c3d5e7953dec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.574683] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.585768] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.600835] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1031.601064] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.747s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.601320] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.118s {{(pid=62730) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.602809] env[62730]: INFO nova.compute.claims [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.604682] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.604964] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1032.604964] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1032.630789] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.630965] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631116] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631248] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631372] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631492] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631611] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. 
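
Annotation: the `_heal_instance_info_cache` periodic task above rebuilds its candidate list and skips every instance still in the Building state (the run of skip messages continues below, ending with "Didn't find any instances for network info cache update."). A simplified, illustrative version of that filter:

    # Simplified version of the heal-candidate filter above; illustrative only.
    def instances_to_heal(instances):
        to_heal = []
        for inst in instances:
            if inst["vm_state"] == "building":
                print("[instance: %s] Skipping network cache update for "
                      "instance because it is Building." % inst["uuid"])
                continue
            to_heal.append(inst)
        if not to_heal:
            print("Didn't find any instances for network info cache update.")
        return to_heal

    instances_to_heal([{"uuid": "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5",
                        "vm_state": "building"}])
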
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631732] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631850] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.631966] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1032.632134] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1032.632803] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.689743] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51e2b94-13ee-45a0-bd3c-f8b181a850de {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.697939] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe80ea3-c5f4-49b1-b11b-87eff398350f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.731130] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd212a03-d6a3-458d-ae00-dc23509451fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.739247] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcda1b1-bb17-4eca-b369-c3a20437d1d3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.752786] env[62730]: DEBUG nova.compute.provider_tree [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.764591] env[62730]: DEBUG nova.scheduler.client.report [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.781148] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.179s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.781459] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1032.836100] env[62730]: DEBUG nova.compute.utils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.841037] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1032.841037] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1032.850280] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1032.925990] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Start spawning the instance on the hypervisor. 
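
Annotation: the provider-tree and scheduler report-client entries above show the computed inventory being compared against a local copy so that placement is only updated when something actually changed ("Inventory has not changed for provider..."). A minimal change-detection sketch; `put_inventory` and `_cache` are stand-ins, not the real client or provider tree:

    # Minimal change-detection sketch; `put_inventory` stands in for the
    # placement client call and `_cache` for the provider tree's local copy.
    _cache = {}

    def set_inventory_for_provider(provider_uuid, inventory, put_inventory):
        if _cache.get(provider_uuid) == inventory:
            print("Inventory has not changed for provider %s" % provider_uuid)
            return
        put_inventory(provider_uuid, inventory)   # only on a real change
        _cache[provider_uuid] = inventory

    inv = {"VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0}}
    put = lambda p, i: print("PUT /resource_providers/%s/inventories" % p)
    set_inventory_for_provider("5ad8d442-72d6-4045-82dd-b3c7e74880a7", inv, put)
    set_inventory_for_provider("5ad8d442-72d6-4045-82dd-b3c7e74880a7", inv, put)
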
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1032.952184] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.952499] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.952695] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.952911] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.953207] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.953412] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.953748] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.953826] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.954054] env[62730]: DEBUG 
nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.954261] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.954470] env[62730]: DEBUG nova.virt.hardware [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.955358] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ea6852-6091-4659-84dd-1256dbe4c8f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.964484] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eddbd8-4b53-47e3-a560-29aace6d000a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.979976] env[62730]: DEBUG nova.policy [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ab1858dda2b4a658496288fa4ee1262', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c54046535dc74172a58cc8e350f2d88d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1033.796009] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Successfully created port: 8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.193082] env[62730]: DEBUG nova.compute.manager [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Received event network-vif-plugged-8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1035.193392] env[62730]: DEBUG oslo_concurrency.lockutils [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] Acquiring lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.193615] env[62730]: DEBUG oslo_concurrency.lockutils [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 
req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.193701] env[62730]: DEBUG oslo_concurrency.lockutils [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.193871] env[62730]: DEBUG nova.compute.manager [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] No waiting events found dispatching network-vif-plugged-8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1035.194059] env[62730]: WARNING nova.compute.manager [req-4424d44e-c5cf-4d6a-92d4-f4202a2cd107 req-537ba18b-4e14-4556-ac6f-f4632431b033 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Received unexpected event network-vif-plugged-8660ec51-2185-4424-9b1d-32201dade4db for instance with vm_state building and task_state spawning. [ 1035.240408] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Successfully updated port: 8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.256351] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.256516] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquired lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.256770] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1035.309246] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance cache missing network info. 
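
Annotation: the neutron-sent network-vif-plugged event above arrives before anything registered a waiter for it, so dispatch finds no pending event and logs the WARNING about an unexpected event for an instance still building/spawning. A toy version of that waiter lookup, with the per-instance "-events" lock modeled as a plain threading.Lock; the names are invented:

    # Toy event-waiter lookup; not Nova's real implementation.
    import threading

    _waiters = {}            # (instance_uuid, event_name) -> threading.Event
    _events_lock = threading.Lock()

    def pop_instance_event(instance_uuid, event_name):
        with _events_lock:   # mirrors the "...-events" lock in the log
            return _waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(instance_uuid, event_name):
        waiter = pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Mirrors the WARNING above: nothing was waiting for this event.
            print("Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()     # unblock the spawn path waiting on the VIF plug

    external_instance_event("4a830a6a-d473-4ae4-858e-2330e42f8c9e",
                            "network-vif-plugged-8660ec51-2185-4424-9b1d-32201dade4db")
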
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1035.669699] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Updating instance_info_cache with network_info: [{"id": "8660ec51-2185-4424-9b1d-32201dade4db", "address": "fa:16:3e:d0:c5:7e", "network": {"id": "45b9337a-8975-4c88-88c8-4bf3e9331b26", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112732988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c54046535dc74172a58cc8e350f2d88d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8660ec51-21", "ovs_interfaceid": "8660ec51-2185-4424-9b1d-32201dade4db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.685536] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Releasing lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.685847] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance network_info: |[{"id": "8660ec51-2185-4424-9b1d-32201dade4db", "address": "fa:16:3e:d0:c5:7e", "network": {"id": "45b9337a-8975-4c88-88c8-4bf3e9331b26", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112732988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c54046535dc74172a58cc8e350f2d88d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8660ec51-21", "ovs_interfaceid": "8660ec51-2185-4424-9b1d-32201dade4db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
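
Annotation: the vmops entry that follows reduces the network_info printed above to the driver's VIF info: bridge name, MAC address, the NSX logical-switch reference, and the vmxnet3 model. A sketch of that mapping, grounded in the two structures shown in the log but simplified:

    # Mapping one network_info entry (as printed above) to the driver's
    # VIF info (as printed below); simplified and illustrative.
    def vif_info_from_network_info(vif):
        return {
            "network_name": vif["network"]["bridge"],
            "mac_address": vif["address"],
            "network_ref": {"type": "OpaqueNetwork",
                            "network-id": vif["details"]["nsx-logical-switch-id"],
                            "network-type": "nsx.LogicalSwitch",
                            "use-external-id": True},
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }

    vif = {"id": "8660ec51-2185-4424-9b1d-32201dade4db",
           "address": "fa:16:3e:d0:c5:7e",
           "network": {"bridge": "br-int"},
           "details": {"nsx-logical-switch-id":
                       "cbf3349e-d05e-4d44-a011-c4b6e41af988"}}
    print(vif_info_from_network_info(vif))
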
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.686313] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:c5:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8660ec51-2185-4424-9b1d-32201dade4db', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.694806] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Creating folder: Project (c54046535dc74172a58cc8e350f2d88d). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1035.695428] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17d8505c-6480-45ca-b2c9-74e42f844e49 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.707433] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Created folder: Project (c54046535dc74172a58cc8e350f2d88d) in parent group-v942928. [ 1035.707631] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Creating folder: Instances. Parent ref: group-v942980. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1035.707873] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93517bbb-3b9c-48a2-97a4-7df436cf59b0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.718500] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Created folder: Instances in parent group-v942980. [ 1035.718777] env[62730]: DEBUG oslo.service.loopingcall [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
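
Annotation: the two Folder.CreateFolder invocations above create a per-project folder under group-v942928 and an Instances folder beneath it. A sketch of driving such a call through an oslo.vmware session; this assumes VMwareAPISession.invoke_api's (module, method, *args, **kwargs) calling convention and elides session setup, so treat it as illustrative only:

    # Illustrative only: invoking Folder.CreateFolder through an established
    # oslo.vmware VMwareAPISession. `session` and `parent_ref` (for example
    # the group-v942928 folder above) are assumed to already exist.
    def create_project_folders(session, parent_ref, project_id):
        # invoke_api(module, method, managed_object, **kwargs) proxies the
        # SOAP call and retries transient faults (assumed signature).
        project = session.invoke_api(session.vim, "CreateFolder", parent_ref,
                                     name="Project (%s)" % project_id)
        return session.invoke_api(session.vim, "CreateFolder", project,
                                  name="Instances")
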
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.718967] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1035.719192] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46cbf72e-53b5-42a1-858d-697eae2f739e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.739313] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.739313] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1035.739504] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.739638] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.743318] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.743318] env[62730]: value = "task-4837134" [ 1035.743318] env[62730]: _type = "Task" [ 1035.743318] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.751652] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837134, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.254066] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837134, 'name': CreateVM_Task, 'duration_secs': 0.317108} completed successfully. 
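
Annotation: CreateVM_Task above is polled ("progress is 0%.") until it completes with a recorded duration_secs. A generic poll loop in the spirit of that wait; `get_task_info` is a hypothetical callable returning (state, progress), not the oslo.vmware API:

    # Generic task-poll loop in the spirit of the task wait above.
    import time

    def wait_for_task(get_task_info, interval=0.5):
        while True:
            state, progress = get_task_info()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            print("progress is %d%%." % progress)
            time.sleep(interval)

    states = iter([("running", 0), ("running", 50), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0)
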
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.254066] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1036.254861] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.255107] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.255499] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1036.255807] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb79e3b-2534-4cce-b965-c3be294e5719 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.261674] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting for the task: (returnval){ [ 1036.261674] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ca8aaf-9325-f64f-6998-dee9d67ad0f2" [ 1036.261674] env[62730]: _type = "Task" [ 1036.261674] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.273876] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52ca8aaf-9325-f64f-6998-dee9d67ad0f2, 'name': SearchDatastore_Task} progress is 0%. 
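
Annotation: the SearchDatastore_Task above checks whether the image already sits in devstack-image-cache_base; on a miss the image is downloaded to a vmware_temp directory and copied into the cache (as the later CopyVirtualDisk entries show). A local-filesystem stand-in for that fetch-if-missing flow; datastore semantics are deliberately elided:

    # Local-filesystem stand-in for the fetch-if-missing image cache flow:
    # look for the cached disk, download to a temp location on a miss, then
    # move it into the cache path.
    import os, shutil, tempfile

    def fetch_image_if_missing(cache_dir, image_id, download):
        cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        if os.path.exists(cached):          # SearchDatastore_Task hit
            return cached
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            download(tmp)                   # fetch from Glance to a temp file
        shutil.move(tmp.name, cached)       # CopyVirtualDisk in the real flow
        return cached

    print(fetch_image_if_missing(tempfile.mkdtemp(),
                                 "a46adab9-3ef5-4b2e-8d44-bab77576ed71",
                                 lambda f: f.write(b"vmdk")))
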
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.772590] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.772856] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.773109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.219987] env[62730]: DEBUG nova.compute.manager [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Received event network-changed-8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1037.220235] env[62730]: DEBUG nova.compute.manager [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Refreshing instance network info cache due to event network-changed-8660ec51-2185-4424-9b1d-32201dade4db. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1037.220456] env[62730]: DEBUG oslo_concurrency.lockutils [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] Acquiring lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.220589] env[62730]: DEBUG oslo_concurrency.lockutils [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] Acquired lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.220747] env[62730]: DEBUG nova.network.neutron [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Refreshing network info cache for port 8660ec51-2185-4424-9b1d-32201dade4db {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1037.668045] env[62730]: DEBUG nova.network.neutron [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Updated VIF entry in instance network info cache for port 8660ec51-2185-4424-9b1d-32201dade4db. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1037.668398] env[62730]: DEBUG nova.network.neutron [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Updating instance_info_cache with network_info: [{"id": "8660ec51-2185-4424-9b1d-32201dade4db", "address": "fa:16:3e:d0:c5:7e", "network": {"id": "45b9337a-8975-4c88-88c8-4bf3e9331b26", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-112732988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c54046535dc74172a58cc8e350f2d88d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8660ec51-21", "ovs_interfaceid": "8660ec51-2185-4424-9b1d-32201dade4db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.678755] env[62730]: DEBUG oslo_concurrency.lockutils [req-339c8aa9-e282-4b40-b777-c34ba4e40277 req-58e333e9-ca5a-4549-9203-cf5fe6e98047 service nova] Releasing lock "refresh_cache-4a830a6a-d473-4ae4-858e-2330e42f8c9e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.409761] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.960681] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a7135e55-50b8-46b6-89ad-bdd8da822381 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Acquiring lock "6a4c0163-c6e3-406d-bcb4-5baf627433e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.960980] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a7135e55-50b8-46b6-89ad-bdd8da822381 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Lock "6a4c0163-c6e3-406d-bcb4-5baf627433e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.103106] env[62730]: DEBUG oslo_concurrency.lockutils [None req-da544af3-5f75-4475-8099-38512e89efc9 tempest-ServerActionsV293TestJSON-1480316407 tempest-ServerActionsV293TestJSON-1480316407-project-member] Acquiring 
lock "87a01d32-4dcc-4e97-a39c-d48c146c18fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.103417] env[62730]: DEBUG oslo_concurrency.lockutils [None req-da544af3-5f75-4475-8099-38512e89efc9 tempest-ServerActionsV293TestJSON-1480316407 tempest-ServerActionsV293TestJSON-1480316407-project-member] Lock "87a01d32-4dcc-4e97-a39c-d48c146c18fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.133101] env[62730]: WARNING oslo_vmware.rw_handles [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1078.133101] env[62730]: ERROR oslo_vmware.rw_handles [ 1078.133711] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1078.135531] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1078.135779] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Copying Virtual Disk [datastore2] vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] 
vmware_temp/6389433f-b40f-45a4-8afb-fc5e74b092df/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1078.136060] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f9076ef-9721-4879-a48b-68cfad5a04e9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.144901] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for the task: (returnval){ [ 1078.144901] env[62730]: value = "task-4837146" [ 1078.144901] env[62730]: _type = "Task" [ 1078.144901] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.153144] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Task: {'id': task-4837146, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.655614] env[62730]: DEBUG oslo_vmware.exceptions [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1078.655914] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.656498] env[62730]: ERROR nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1078.656498] env[62730]: Faults: ['InvalidArgument'] [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Traceback (most recent call last): [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] yield resources [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self.driver.spawn(context, instance, image_meta, [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1078.656498] env[62730]: ERROR nova.compute.manager 
[instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self._fetch_image_if_missing(context, vi) [ 1078.656498] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] image_cache(vi, tmp_image_ds_loc) [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] vm_util.copy_virtual_disk( [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] session._wait_for_task(vmdk_copy_task) [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return self.wait_for_task(task_ref) [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return evt.wait() [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] result = hub.switch() [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1078.656929] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return self.greenlet.switch() [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self.f(*self.args, **self.kw) [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] raise exceptions.translate_fault(task_info.error) [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Faults: ['InvalidArgument'] [ 1078.657322] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] [ 1078.657322] env[62730]: INFO nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Terminating instance [ 1078.658513] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.658655] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.659265] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1078.659456] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1078.659689] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edc53d32-3178-4840-b523-f8b20046ab3f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.662053] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9910dd-c796-4a13-a1dd-4c8350f2fc86 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.669733] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1078.669988] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbe3ec2a-ed2c-441e-9eb4-96600d30586a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.672616] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.672798] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1078.673921] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-243cfc9e-8254-4fb2-801e-879bb7a2f075 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.680054] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for the task: (returnval){ [ 1078.680054] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52559f36-1128-fd71-069b-a7157a0441cd" [ 1078.680054] env[62730]: _type = "Task" [ 1078.680054] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.688792] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52559f36-1128-fd71-069b-a7157a0441cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.758650] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1078.758985] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1078.759205] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Deleting the datastore file [datastore2] 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.759482] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c283f24-43da-414c-b624-aa85ef0b47e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.766437] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for the task: (returnval){ [ 1078.766437] env[62730]: value = "task-4837148" [ 1078.766437] env[62730]: _type = "Task" [ 1078.766437] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.775278] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Task: {'id': task-4837148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.190440] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1079.190741] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Creating directory with path [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.190878] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d1b94b5-f0a5-4d78-a77c-51776d28eaa4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.203233] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Created directory with path [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.203433] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Fetch image to [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1079.204529] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1079.204529] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100fe6e9-28b7-44a1-9ce1-bfad83af0918 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.212867] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16932f3c-6897-4633-b48d-62ffbddb0c5d {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.223111] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618c80d2-3116-484c-b10e-7bde1a27f8c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.255516] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23c2105-ae33-41a8-957f-f7d91d4888ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.262368] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2ae2f0bb-95f4-42ac-aee4-3ae7a115706f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.276178] env[62730]: DEBUG oslo_vmware.api [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Task: {'id': task-4837148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070271} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.276428] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.276611] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1079.276785] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1079.276958] env[62730]: INFO nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1079.279319] env[62730]: DEBUG nova.compute.claims [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1079.279503] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.279725] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.284866] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1079.340706] env[62730]: DEBUG oslo_vmware.rw_handles [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1079.400410] env[62730]: DEBUG oslo_vmware.rw_handles [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1079.400700] env[62730]: DEBUG oslo_vmware.rw_handles [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1079.722737] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10626b55-38d5-4a64-9ce6-6ed3559c1433 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.731780] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0a5324-c261-4c27-8347-fb40f9a5b70a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.761047] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67358cd-654d-4524-b119-9312b7db5605 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.769253] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72ddcde-290c-48cb-9b28-98c26746fb2d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.784688] env[62730]: DEBUG nova.compute.provider_tree [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.796271] env[62730]: DEBUG nova.scheduler.client.report [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1079.812078] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.532s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.812633] env[62730]: ERROR nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1079.812633] env[62730]: Faults: ['InvalidArgument'] [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Traceback (most recent call last): [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1079.812633] env[62730]: 
ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self.driver.spawn(context, instance, image_meta, [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self._fetch_image_if_missing(context, vi) [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] image_cache(vi, tmp_image_ds_loc) [ 1079.812633] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] vm_util.copy_virtual_disk( [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] session._wait_for_task(vmdk_copy_task) [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return self.wait_for_task(task_ref) [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return evt.wait() [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] result = hub.switch() [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] return self.greenlet.switch() [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1079.812952] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] self.f(*self.args, **self.kw) [ 1079.813342] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1079.813342] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] raise exceptions.translate_fault(task_info.error) [ 1079.813342] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1079.813342] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Faults: ['InvalidArgument'] [ 1079.813342] env[62730]: ERROR nova.compute.manager [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] [ 1079.813342] env[62730]: DEBUG nova.compute.utils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1079.815083] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Build of instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 was re-scheduled: A specified parameter was not correct: fileType [ 1079.815083] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1079.815471] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1079.815647] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1079.815821] env[62730]: DEBUG nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1079.815983] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1080.435903] env[62730]: DEBUG nova.network.neutron [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.453573] env[62730]: INFO nova.compute.manager [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Took 0.64 seconds to deallocate network for instance. [ 1080.558646] env[62730]: INFO nova.scheduler.client.report [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Deleted allocations for instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 [ 1080.591655] env[62730]: DEBUG oslo_concurrency.lockutils [None req-252d38b6-527e-4849-af26-37573ea10113 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 520.479s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.593070] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 319.750s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.594766] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Acquiring lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.594766] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.594766] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.596142] env[62730]: INFO nova.compute.manager [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Terminating instance [ 1080.598133] env[62730]: DEBUG nova.compute.manager [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1080.598347] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1080.598884] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a19cedf-3f4b-413f-91ef-b714db6c2f44 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.608889] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcdce20-4f7c-40d2-a972-c94938ba1500 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.620632] env[62730]: DEBUG nova.compute.manager [None req-2d117a17-01b8-428b-838c-302cb0d3cbf5 tempest-FloatingIPsAssociationTestJSON-661442993 tempest-FloatingIPsAssociationTestJSON-661442993-project-member] [instance: f0be97b5-35e3-4c67-96f6-c604a71c38b1] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1080.644110] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5 could not be found. [ 1080.644427] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1080.644510] env[62730]: INFO nova.compute.manager [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1080.644840] env[62730]: DEBUG oslo.service.loopingcall [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.645112] env[62730]: DEBUG nova.compute.manager [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1080.645205] env[62730]: DEBUG nova.network.neutron [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1080.649209] env[62730]: DEBUG nova.compute.manager [None req-2d117a17-01b8-428b-838c-302cb0d3cbf5 tempest-FloatingIPsAssociationTestJSON-661442993 tempest-FloatingIPsAssociationTestJSON-661442993-project-member] [instance: f0be97b5-35e3-4c67-96f6-c604a71c38b1] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1080.674291] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2d117a17-01b8-428b-838c-302cb0d3cbf5 tempest-FloatingIPsAssociationTestJSON-661442993 tempest-FloatingIPsAssociationTestJSON-661442993-project-member] Lock "f0be97b5-35e3-4c67-96f6-c604a71c38b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.006s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.674547] env[62730]: DEBUG nova.network.neutron [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.683043] env[62730]: INFO nova.compute.manager [-] [instance: 1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5] Took 0.04 seconds to deallocate network for instance. [ 1080.684999] env[62730]: DEBUG nova.compute.manager [None req-5fecfe18-6bc6-4822-a5e4-23062e9bbed2 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] [instance: 6ab13a84-4fcf-451a-a8d7-79ec54af27da] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1080.709549] env[62730]: DEBUG nova.compute.manager [None req-5fecfe18-6bc6-4822-a5e4-23062e9bbed2 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] [instance: 6ab13a84-4fcf-451a-a8d7-79ec54af27da] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1080.733037] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fecfe18-6bc6-4822-a5e4-23062e9bbed2 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Lock "6ab13a84-4fcf-451a-a8d7-79ec54af27da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.977s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.753947] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1080.815504] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b778f5e-0e55-4f49-8dba-3efbfd5447a9 tempest-ImagesNegativeTestJSON-1262021261 tempest-ImagesNegativeTestJSON-1262021261-project-member] Lock "1e4cc6b4-5726-4bf5-b7ac-0640ba255ce5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.222s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.833294] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.833611] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.835236] env[62730]: INFO nova.compute.claims [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.172131] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f353ea-0fb2-40a8-9ddf-989b6423f316 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.180397] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa9fd9a-d026-4649-83d5-859f3d49d46b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.209813] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f910e66-0836-4375-b209-765b212527a7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.222330] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4012f03e-fa49-4b5b-8b21-14bfac42505d 
{{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.237017] env[62730]: DEBUG nova.compute.provider_tree [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.247349] env[62730]: DEBUG nova.scheduler.client.report [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1081.261963] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.428s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.262507] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1081.296201] env[62730]: DEBUG nova.compute.utils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1081.298357] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1081.298534] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1081.307405] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1081.375634] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1081.401693] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1081.401979] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1081.402156] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.402338] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1081.402487] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.402635] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1081.402842] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1081.403008] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1081.403184] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1081.403345] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1081.403518] env[62730]: DEBUG nova.virt.hardware [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.404426] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ead7a41-b99a-4e08-bb99-670ce5f28c2a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.413023] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a44ca03-65e3-4d69-8dae-a467bb5462aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.418246] env[62730]: DEBUG nova.policy [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f707fd90c7ec4d9eb6a90fbbd7f51499', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51dab0b2d3a645f989f127257241fd91', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1081.935478] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Successfully created port: 6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.738673] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1082.945253] env[62730]: DEBUG nova.compute.manager [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] 
[instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Received event network-vif-plugged-6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1082.945519] env[62730]: DEBUG oslo_concurrency.lockutils [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] Acquiring lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.945705] env[62730]: DEBUG oslo_concurrency.lockutils [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.945873] env[62730]: DEBUG oslo_concurrency.lockutils [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.946471] env[62730]: DEBUG nova.compute.manager [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] No waiting events found dispatching network-vif-plugged-6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1082.946800] env[62730]: WARNING nova.compute.manager [req-2800675d-9dfa-4e0b-87cb-43779a9e2114 req-b79b089e-bec6-4646-b90e-8cf93d3bd96a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Received unexpected event network-vif-plugged-6cda002b-19fe-4b21-a624-882b4f052d9d for instance with vm_state building and task_state spawning. 
[ 1083.245532] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Successfully updated port: 6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1083.263470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.263659] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquired lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.263818] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1083.345269] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1083.726925] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Updating instance_info_cache with network_info: [{"id": "6cda002b-19fe-4b21-a624-882b4f052d9d", "address": "fa:16:3e:10:cb:6b", "network": {"id": "a70aed4d-fc0c-4ef9-ad0d-7bc60fb7104c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254595705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51dab0b2d3a645f989f127257241fd91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cda002b-19", "ovs_interfaceid": "6cda002b-19fe-4b21-a624-882b4f052d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.741847] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Releasing lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.742182] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance network_info: |[{"id": "6cda002b-19fe-4b21-a624-882b4f052d9d", "address": "fa:16:3e:10:cb:6b", "network": {"id": "a70aed4d-fc0c-4ef9-ad0d-7bc60fb7104c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254595705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51dab0b2d3a645f989f127257241fd91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cda002b-19", "ovs_interfaceid": "6cda002b-19fe-4b21-a624-882b4f052d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.742835] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:cb:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cda002b-19fe-4b21-a624-882b4f052d9d', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.750356] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Creating folder: Project (51dab0b2d3a645f989f127257241fd91). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1083.750959] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ca8d20c-9db2-4fc3-b727-12525d28e0af {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.763825] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Created folder: Project (51dab0b2d3a645f989f127257241fd91) in parent group-v942928. [ 1083.764057] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Creating folder: Instances. Parent ref: group-v942987. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1083.764311] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4372aa9-fc65-4615-858d-ae0cf3329336 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.775917] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Created folder: Instances in parent group-v942987. [ 1083.776176] env[62730]: DEBUG oslo.service.loopingcall [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.776372] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1083.776578] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7d56205-d190-4e4f-8ef2-0c47d7b23954 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.796466] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.796466] env[62730]: value = "task-4837151" [ 1083.796466] env[62730]: _type = "Task" [ 1083.796466] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.804691] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837151, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.311343] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837151, 'name': CreateVM_Task, 'duration_secs': 0.306843} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.311713] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1084.312599] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.312866] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.313362] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.313730] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d85a12f-6121-442b-b19b-57b99aed10ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.319807] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for the task: (returnval){ [ 1084.319807] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52afc9c8-434c-0dd6-e262-8bf83f49b59d" [ 1084.319807] env[62730]: _type = "Task" [ 1084.319807] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.333409] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52afc9c8-434c-0dd6-e262-8bf83f49b59d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.830731] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.831331] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.831331] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.069350] env[62730]: DEBUG nova.compute.manager [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Received event network-changed-6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1085.069350] env[62730]: DEBUG nova.compute.manager [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Refreshing instance network info cache due to event network-changed-6cda002b-19fe-4b21-a624-882b4f052d9d. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1085.069350] env[62730]: DEBUG oslo_concurrency.lockutils [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] Acquiring lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.069350] env[62730]: DEBUG oslo_concurrency.lockutils [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] Acquired lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.069350] env[62730]: DEBUG nova.network.neutron [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Refreshing network info cache for port 6cda002b-19fe-4b21-a624-882b4f052d9d {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1085.607356] env[62730]: DEBUG nova.network.neutron [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Updated VIF entry in instance network info cache for port 6cda002b-19fe-4b21-a624-882b4f052d9d. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1085.607736] env[62730]: DEBUG nova.network.neutron [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Updating instance_info_cache with network_info: [{"id": "6cda002b-19fe-4b21-a624-882b4f052d9d", "address": "fa:16:3e:10:cb:6b", "network": {"id": "a70aed4d-fc0c-4ef9-ad0d-7bc60fb7104c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254595705-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51dab0b2d3a645f989f127257241fd91", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cda002b-19", "ovs_interfaceid": "6cda002b-19fe-4b21-a624-882b4f052d9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.623098] env[62730]: DEBUG oslo_concurrency.lockutils [req-d6dbb2a9-4c20-4d1b-92db-f420a84e86d4 req-48800b72-b2ab-494b-bc05-9b8e93b4594a service nova] Releasing lock "refresh_cache-c2ac09ea-97ae-4e73-9ecb-010241e231f9" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.746327] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.737385] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.737626] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.737932] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1088.755593] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.737795] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.737795] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.856395] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "e8657fe0-3db2-4768-817f-944a736da401" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.856673] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.733114] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.736855] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.737087] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None 
None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1091.737171] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1091.759417] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.759785] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.759785] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.759912] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760031] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760235] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760377] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760500] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760619] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760737] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1091.760857] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1091.761414] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.775023] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.775284] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.775459] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.775620] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1091.776763] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1498bd15-ffc0-43f3-817d-10876baff7ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.785888] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b53a57a-a635-4857-baba-50cf7d910767 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.801170] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916479f6-4f20-4924-b72a-82ee0ab8584b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.808644] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcef104-1c9b-478b-840e-0b30a6f00af8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.840142] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180555MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1091.840333] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.840509] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.995575] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.995775] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.995848] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.995932] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996095] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996194] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996455] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996518] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996640] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1091.996761] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1092.014881] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.031313] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 77b49a77-2048-4812-93bc-aba06586d2a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.043238] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8ab13896-dd97-47cc-8013-9fe9dc791ef6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.054147] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 699d4cca-99b8-4517-957b-949afe791aed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.067030] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.078998] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7f22463d-9e8c-4d5b-b30e-86654f34b633 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.092280] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8f51fc3b-205b-41cb-bc95-1f0e694dda76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.103859] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ffe28344-6909-4252-b899-4a2d66b1d6df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.115272] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 234808e0-4e10-4209-96c0-fa61fe2cdbe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.126207] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance de6f4f4c-b07a-437e-b01b-e7a7b600fc25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.137987] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 81f8a8a0-9897-424e-aaa7-02e902b996d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.148987] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.160491] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6a4c0163-c6e3-406d-bcb4-5baf627433e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.171419] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 87a01d32-4dcc-4e97-a39c-d48c146c18fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.182641] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1092.182917] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1092.183096] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '48', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_534bb3f3d10946c8a3b9d3100be143cf': '1', 'io_workload': '10', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_task_spawning': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1092.201395] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1092.216626] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1092.216837] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.229561] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1092.249378] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing trait 
associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1092.603037] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf61d4ab-ec70-4817-8c46-25409f1f2512 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.611063] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e165e7-5471-484d-b713-7105a17b1c74 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.641644] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9d00df-c74d-473f-9dfe-90f0eb12f62c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.649933] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d531cd-bc9a-4296-90e2-918ff93d3218 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.664299] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.673390] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.689024] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1092.689174] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.849s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.665634] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.005374] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.029658] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 1094.029658] env[62730]: value = "domain-c8" [ 1094.029658] env[62730]: _type = "ClusterComputeResource" [ 1094.029658] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1094.031267] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36989f8-7dc9-49bd-b63f-900dd5658f4b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.048599] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 10 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1094.048823] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid cbdca8b1-7929-4d2c-860c-2b74826d1d11 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.048992] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 540af840-eba5-4cee-a37c-6d6809a24f95 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049174] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 986e37d4-d3ae-42a0-8caa-39b92636b973 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049335] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 91052772-87d4-4fb3-b590-f071c0419196 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049495] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 04ba035f-97b6-49d1-8506-35f7d6fccb03 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049650] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049815] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 22f72732-e5e2-49dc-810a-ab90d7a367a0 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.049987] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 01a34662-fef9-4855-ba3c-39184982fd0e {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.050160] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 4a830a6a-d473-4ae4-858e-2330e42f8c9e {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.050313] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid c2ac09ea-97ae-4e73-9ecb-010241e231f9 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1094.050635] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.050870] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "540af840-eba5-4cee-a37c-6d6809a24f95" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.051093] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "986e37d4-d3ae-42a0-8caa-39b92636b973" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.051351] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "91052772-87d4-4fb3-b590-f071c0419196" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.051564] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.051764] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.051994] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.052255] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "01a34662-fef9-4855-ba3c-39184982fd0e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.052465] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.052703] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock 
"c2ac09ea-97ae-4e73-9ecb-010241e231f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.738055] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.738420] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1094.751605] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 0 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1097.751216] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.751633] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1128.153238] env[62730]: WARNING oslo_vmware.rw_handles [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1128.153238] env[62730]: ERROR oslo_vmware.rw_handles [ 1128.154010] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 
{{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1128.156071] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1128.156323] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Copying Virtual Disk [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/83404dc1-50ba-4f3f-8fb9-257c01979880/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1128.156610] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50d9b288-1b7e-46ee-b8d3-bf98171a2064 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.165919] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for the task: (returnval){ [ 1128.165919] env[62730]: value = "task-4837152" [ 1128.165919] env[62730]: _type = "Task" [ 1128.165919] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.174916] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Task: {'id': task-4837152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.677673] env[62730]: DEBUG oslo_vmware.exceptions [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1128.677673] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.677994] env[62730]: ERROR nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1128.677994] env[62730]: Faults: ['InvalidArgument'] [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Traceback (most recent call last): [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] yield resources [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self.driver.spawn(context, instance, image_meta, [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self._fetch_image_if_missing(context, vi) [ 1128.677994] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] image_cache(vi, tmp_image_ds_loc) [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] vm_util.copy_virtual_disk( [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] session._wait_for_task(vmdk_copy_task) [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return self.wait_for_task(task_ref) [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return evt.wait() [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] result = hub.switch() [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1128.678433] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return self.greenlet.switch() [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self.f(*self.args, **self.kw) [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] raise exceptions.translate_fault(task_info.error) [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Faults: ['InvalidArgument'] [ 1128.678820] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] [ 1128.678820] env[62730]: INFO nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Terminating instance [ 1128.679929] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.680578] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.680578] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d52f3897-7d5a-4587-bf39-a20f77f329a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.682888] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1128.683108] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1128.683839] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5479bff1-e820-42de-b552-7cf34a585c9b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.691465] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1128.691716] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62c841ff-9a36-41db-a14a-166efeb1e13c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.694128] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.694311] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1128.695375] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd601729-91eb-4e7a-a212-de747c513f6b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.701377] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for the task: (returnval){ [ 1128.701377] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5264077a-cbf6-d79c-9a28-b178ec509a02" [ 1128.701377] env[62730]: _type = "Task" [ 1128.701377] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.710175] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5264077a-cbf6-d79c-9a28-b178ec509a02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.782419] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1128.782666] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1128.782856] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Deleting the datastore file [datastore2] cbdca8b1-7929-4d2c-860c-2b74826d1d11 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.783172] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2aeed89-b666-4f82-9e2b-84cc048f86a8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.791180] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for the task: (returnval){ [ 1128.791180] env[62730]: value = "task-4837154" [ 1128.791180] env[62730]: _type = "Task" [ 1128.791180] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.800086] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Task: {'id': task-4837154, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.212108] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1129.212474] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Creating directory with path [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1129.212591] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4c5dc93-00f7-465e-8568-0ff459404c39 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.223872] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Created directory with path [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1129.224094] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Fetch image to [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1129.224252] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1129.224973] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aaede9-7779-45b2-b5d9-b26967643bcb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.232156] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1d99ce-038c-4a0b-b7b5-d7f711d1fbe3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.241339] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fc2307-d443-404c-93f2-9108ab01e6fb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.272882] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826ff8d2-e054-4312-816f-5c37cd4ca1d6 
{{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.279313] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-663e0a3d-4c7b-4d01-b080-6aac33ea4776 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.300581] env[62730]: DEBUG oslo_vmware.api [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Task: {'id': task-4837154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070677} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.302211] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.302412] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1129.302591] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1129.302799] env[62730]: INFO nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Took 0.62 seconds to destroy the instance on the hypervisor. 
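The teardown above uses the same polling contract as the disk copy that failed earlier: oslo.vmware hands back a Task reference, and wait_for_task/_poll_task repeatedly re-read the task's info until it reaches a terminal state, either returning the result (the DeleteDatastoreFile_Task here, done in about 0.07s) or translating the VIM fault into a Python exception via translate_fault (the InvalidArgument raised above). A minimal, self-contained sketch of that loop; get_task_info, TaskFailed and the dict shape are illustrative stand-ins, not oslo.vmware's real API:

    import time

    class TaskFailed(Exception):
        """Stand-in for the exception raised via translate_fault(task_info.error)."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        # Re-read the task's info until it reaches a terminal state, the way
        # the "Waiting for the task ... progress is 0%" entries repeat above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise TaskFailed(info['error'])  # e.g. Faults: ['InvalidArgument']
            time.sleep(poll_interval)
        raise TimeoutError('task did not reach a terminal state in time')

In the real session the poll interval and retry behaviour come from configuration; the point is only that every "Invoking ..._Task" call in this log ends in exactly one of the two branches above.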
[ 1129.304660] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1129.306835] env[62730]: DEBUG nova.compute.claims [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1129.307014] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.307242] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.361912] env[62730]: DEBUG oslo_vmware.rw_handles [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1129.420760] env[62730]: DEBUG oslo_vmware.rw_handles [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1129.420878] env[62730]: DEBUG oslo_vmware.rw_handles [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1129.713462] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fa49de-aa24-4ebf-bb63-bafe5d553c14 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.721940] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341ac6a0-5cd9-4eac-bb80-457c91ec7444 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.752495] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6096bcb-df9c-4f3e-bac6-30b20d4d1aef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.760438] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b093a788-f871-4c3c-af3f-cd8a62031234 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.774092] env[62730]: DEBUG nova.compute.provider_tree [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.784431] env[62730]: DEBUG nova.scheduler.client.report [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1129.802386] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.495s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.803149] env[62730]: ERROR nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1129.803149] env[62730]: Faults: ['InvalidArgument'] [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Traceback (most recent call last): [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self.driver.spawn(context, instance, image_meta, [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self._fetch_image_if_missing(context, vi) [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] image_cache(vi, tmp_image_ds_loc) [ 1129.803149] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] vm_util.copy_virtual_disk( [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] session._wait_for_task(vmdk_copy_task) [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return self.wait_for_task(task_ref) [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return evt.wait() [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] result = hub.switch() [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] return self.greenlet.switch() [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1129.803517] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] self.f(*self.args, **self.kw) [ 1129.803842] 
env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1129.803842] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] raise exceptions.translate_fault(task_info.error) [ 1129.803842] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1129.803842] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Faults: ['InvalidArgument'] [ 1129.803842] env[62730]: ERROR nova.compute.manager [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] [ 1129.803968] env[62730]: DEBUG nova.compute.utils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1129.807259] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Build of instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 was re-scheduled: A specified parameter was not correct: fileType [ 1129.807259] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1129.807719] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1129.807948] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1129.808178] env[62730]: DEBUG nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1129.808568] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1130.378196] env[62730]: DEBUG nova.network.neutron [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.389514] env[62730]: INFO nova.compute.manager [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Took 0.58 seconds to deallocate network for instance. [ 1130.489236] env[62730]: INFO nova.scheduler.client.report [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Deleted allocations for instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 [ 1130.511767] env[62730]: DEBUG oslo_concurrency.lockutils [None req-58652d3c-99a9-4dfe-9576-8b5e2d1d7e92 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 569.290s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.512949] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 369.452s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.513196] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Acquiring lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.513415] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913
tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.514112] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.515505] env[62730]: INFO nova.compute.manager [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Terminating instance [ 1130.517694] env[62730]: DEBUG nova.compute.manager [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1130.517888] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1130.518159] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4eccc5e8-e475-48ef-bcdd-750bfcd8150f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.525912] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1130.531283] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3007dbc6-b8d9-4569-9baa-a29ac8bf46d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.569414] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cbdca8b1-7929-4d2c-860c-2b74826d1d11 could not be found.
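This InstanceNotFound is expected: the earlier re-schedule cleanup already unregistered the VM and deleted its datastore files, so when do_terminate_instance finally gets the instance lock there is nothing left on the backend. As the next entries show, the driver logs the warning and proceeds as if the destroy succeeded, so network deallocation and allocation cleanup still run. A rough sketch of that idempotent teardown pattern, assuming a hypothetical driver object; InstanceNotFound here is a stand-in for nova.exception.InstanceNotFound, not Nova's exact code:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('destroy')

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(driver, uuid):
        # Teardown must be idempotent: a VM that is already gone from the
        # backend is logged as a warning and treated as destroyed, so the
        # rest of the cleanup (network, allocations) still runs.
        try:
            driver.destroy(uuid)
        except InstanceNotFound:
            LOG.warning('Instance does not exist on backend: %s', uuid)
        LOG.debug('Instance destroyed')  # reached whether or not the VM existed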
[ 1130.569580] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1130.569846] env[62730]: INFO nova.compute.manager [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1130.570258] env[62730]: DEBUG oslo.service.loopingcall [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1130.570878] env[62730]: DEBUG nova.compute.manager [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1130.570878] env[62730]: DEBUG nova.network.neutron [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1130.597819] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.598097] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.599718] env[62730]: INFO nova.compute.claims [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.603238] env[62730]: DEBUG nova.network.neutron [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.610179] env[62730]: INFO nova.compute.manager [-] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] Took 0.04 seconds to deallocate network for instance.
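The compute_resources entries above are oslo.concurrency's named-semaphore bookkeeping: every claim, abort and update path on the resource tracker enters one critical section, which is why each lock line reports how long the caller waited and how long the lock was held. A small sketch of the pattern using the real lockutils.synchronized decorator (assuming oslo.concurrency is installed); the resource dict and claim function are simplified stand-ins for Nova's ResourceTracker, not its actual implementation:

    from oslo_concurrency import lockutils

    # Simplified shared state; the real ResourceTracker guards much more.
    _resources = {'vcpus_used': 0, 'vcpus_total': 48}

    @lockutils.synchronized('compute_resources')
    def instance_claim(vcpus):
        # All claim/abort/update paths take the same named lock, which is what
        # produces the paired "waited N.NNNs" / "held N.NNNs" log lines.
        if _resources['vcpus_used'] + vcpus > _resources['vcpus_total']:
            raise RuntimeError('insufficient vCPUs for claim')
        _resources['vcpus_used'] += vcpus
        return _resources['vcpus_used']

Because the semaphore is keyed by name, unrelated code paths that pass the same string serialize against each other; that is how the abort_instance_claim above could hold the lock for 0.495s while a later instance_claim waited on it.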
[ 1130.742817] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bbb131f7-3224-46c8-81dc-72265574e857 tempest-ServersWithSpecificFlavorTestJSON-271357913 tempest-ServersWithSpecificFlavorTestJSON-271357913-project-member] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.230s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.743776] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 36.693s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.743975] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: cbdca8b1-7929-4d2c-860c-2b74826d1d11] During sync_power_state the instance has a pending task (deleting). Skip. [ 1130.744177] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "cbdca8b1-7929-4d2c-860c-2b74826d1d11" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.111568] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c051a4b9-3810-4e09-a58a-69a3a85ba203 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.120977] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c29fb4-c7df-4c5b-8733-adad9d2ad5f6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.150586] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1669dbf5-561d-4fe5-9e46-8c19a0711c42 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.158365] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c912411-38e1-4284-92d4-230620c6bf5a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.171398] env[62730]: DEBUG nova.compute.provider_tree [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.180422] env[62730]: DEBUG nova.scheduler.client.report [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1131.195993] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.598s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.196220] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1131.232076] env[62730]: DEBUG nova.compute.utils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1131.233926] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1131.235024] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1131.243402] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1131.331883] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1131.363395] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1131.363667] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1131.363845] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1131.364037] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1131.364153] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1131.364328] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1131.364608] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1131.364755] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1131.364875] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1131.368249] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1131.368510] env[62730]: DEBUG nova.virt.hardware [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1131.369704] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a320fd44-c3b3-4cb0-857d-e01bee93b23e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.378776] env[62730]: DEBUG nova.policy [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f6f0c96261944aa91e1e3f9806b1025', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca2739fcb8b4c7db333ac9aa362ca50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1131.381274] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fd64d1-a562-4e1d-98e7-7c735b2a3b1b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.783950] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Successfully created port: 44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.849150] env[62730]: DEBUG nova.compute.manager [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Received event network-vif-plugged-44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1132.849150] env[62730]: DEBUG oslo_concurrency.lockutils [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] Acquiring lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
[ 1132.849150] env[62730]: DEBUG oslo_concurrency.lockutils [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.849150] env[62730]: DEBUG oslo_concurrency.lockutils [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.850015] env[62730]: DEBUG nova.compute.manager [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] No waiting events found dispatching network-vif-plugged-44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1132.850330] env[62730]: WARNING nova.compute.manager [req-3e751b2e-ba0b-430f-9268-431681ed3477 req-d9f1e5a7-e548-4f3e-afe9-07d2baff4f81 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Received unexpected event network-vif-plugged-44fe65f7-b218-49ba-9d5d-231b8869fb06 for instance with vm_state building and task_state spawning. [ 1132.882370] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Successfully updated port: 44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.892481] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.892796] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.892796] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1132.943755] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance cache missing network info.
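
The "-events" lock traffic above is Nova's external-event plumbing: Neutron reports network-vif-plugged, and pop_instance_event looks for a waiter registered under that event name; none had been registered yet, so the event is logged as unexpected and dropped. A toy version of the registry (threading-based here for self-containment; Nova's real implementation is greenthread-based and lives in nova.compute.manager.InstanceEvents):

    import threading

    class InstanceEvents:
        """Map (instance_uuid -> {event_key: threading.Event}) for waiters."""

        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}

        def prepare(self, instance_uuid, event_key):
            # A spawning thread registers interest before it starts waiting.
            with self._lock:
                ev = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_key] = ev
                return ev

        def pop(self, instance_uuid, event_key):
            # The dispatcher pops the waiter under the per-registry lock.
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_key, None)

    registry = InstanceEvents()

    def dispatch(instance_uuid, event_key):
        waiter = registry.pop(instance_uuid, event_key)
        if waiter is None:
            # The situation in the log: the event arrived before anyone
            # registered for it, so it is reported as unexpected.
            print('Received unexpected event %s for instance %s'
                  % (event_key, instance_uuid))
        else:
            waiter.set()

    dispatch('3a61955c-d6df-4024-bc41-b1100a89fd7f',
             'network-vif-plugged-44fe65f7-b218-49ba-9d5d-231b8869fb06')
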
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1133.488579] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Updating instance_info_cache with network_info: [{"id": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "address": "fa:16:3e:7f:a7:c2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fe65f7-b2", "ovs_interfaceid": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.504595] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.504918] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance network_info: |[{"id": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "address": "fa:16:3e:7f:a7:c2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fe65f7-b2", "ovs_interfaceid": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1133.505471] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:a7:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44fe65f7-b218-49ba-9d5d-231b8869fb06', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.513038] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating folder: Project (1ca2739fcb8b4c7db333ac9aa362ca50). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1133.513768] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34e339ba-bb91-4263-a56a-86c1387b9237 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.525285] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created folder: Project (1ca2739fcb8b4c7db333ac9aa362ca50) in parent group-v942928. [ 1133.527356] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating folder: Instances. Parent ref: group-v942990. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1133.527356] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83cbe273-cfd6-44d3-baf5-eff185c47dff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.536018] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created folder: Instances in parent group-v942990. [ 1133.536293] env[62730]: DEBUG oslo.service.loopingcall [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
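
The CreateVM_Task records just below are oslo.vmware's generic task wait: the SOAP call returns a task reference immediately, and the client polls the task's info.state until it reaches success or error. Roughly, assuming a poll_info callable that stands in for the real property-collector read (this is a hand-rolled loop, not oslo.vmware's LoopingCall-based one):

    import time

    def wait_for_task(poll_info, interval=0.5):
        """Poll a vSphere-style task until it leaves the running state.

        poll_info is any callable returning an object with .state,
        .progress and .error attributes -- a stand-in for reading the
        task's 'info' property through the session."""
        while True:
            info = poll_info()
            if info.state in ('queued', 'running'):
                print('progress is %s%%.' % (info.progress or 0))
                time.sleep(interval)
            elif info.state == 'success':
                return info
            else:
                # oslo.vmware translates the fault into an exception here.
                raise RuntimeError(info.error)

    # Tiny stand-in that succeeds on the second poll, mirroring the
    # "progress is 0%" / "completed successfully" pair in the log:
    class _Info:
        def __init__(self, state, progress=0, error=None):
            self.state, self.progress, self.error = state, progress, error

    states = iter([_Info('running'), _Info('success')])
    wait_for_task(lambda: next(states), interval=0)
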
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.536491] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1133.536700] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbf2a523-8337-45a9-8b8b-4591a2df32a0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.557638] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.557638] env[62730]: value = "task-4837157" [ 1133.557638] env[62730]: _type = "Task" [ 1133.557638] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.566484] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837157, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.075103] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837157, 'name': CreateVM_Task, 'duration_secs': 0.296098} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.075103] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1134.075543] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.075661] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.075980] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1134.076258] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3c08d8-9b28-49e9-ac7b-0b5c20e24956 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.081790] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1134.081790] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5222ddc9-c842-0d84-aaca-f2f38dfc1578" [ 1134.081790] env[62730]: _type = "Task" [ 1134.081790] env[62730]: } to 
complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.091584] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5222ddc9-c842-0d84-aaca-f2f38dfc1578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.593068] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.593068] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.593322] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.010382] env[62730]: DEBUG nova.compute.manager [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Received event network-changed-44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1135.010719] env[62730]: DEBUG nova.compute.manager [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Refreshing instance network info cache due to event network-changed-44fe65f7-b218-49ba-9d5d-231b8869fb06. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1135.010975] env[62730]: DEBUG oslo_concurrency.lockutils [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] Acquiring lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.011226] env[62730]: DEBUG oslo_concurrency.lockutils [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] Acquired lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.011434] env[62730]: DEBUG nova.network.neutron [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Refreshing network info cache for port 44fe65f7-b218-49ba-9d5d-231b8869fb06 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1135.473950] env[62730]: DEBUG nova.network.neutron [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Updated VIF entry in instance network info cache for port 44fe65f7-b218-49ba-9d5d-231b8869fb06. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1135.474341] env[62730]: DEBUG nova.network.neutron [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Updating instance_info_cache with network_info: [{"id": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "address": "fa:16:3e:7f:a7:c2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44fe65f7-b2", "ovs_interfaceid": "44fe65f7-b218-49ba-9d5d-231b8869fb06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.483870] env[62730]: DEBUG oslo_concurrency.lockutils [req-97f55f97-81d4-4a51-aebb-f84d51243a9a req-9f00f5be-ba0b-4363-85ff-67e45bd73415 service nova] Releasing lock "refresh_cache-3a61955c-d6df-4024-bc41-b1100a89fd7f" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.399280] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] 
Acquiring lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.075960] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "d276dbe7-a0fc-4518-9006-a0d749c07984" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.076270] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.100575] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.101086] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.738773] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.738161] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.737627] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.737636] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.732888] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.736612] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.736812] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1152.736946] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1152.760903] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.761332] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.761543] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.761804] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.761975] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.762184] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.762409] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.762601] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. 
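
The surrounding run of "Skipping network cache update" records comes from the _heal_instance_info_cache periodic task: it walks the host's instances looking for one whose info cache is safe to refresh, and passes over anything still building, since a building instance's network info is not yet stable. A condensed sketch of that selection loop (illustrative only; the real method also rotates through instances and honors a per-run budget):

    # vm_state value as Nova spells it; instances are plain dicts here.
    BUILDING = 'building'

    def pick_instance_to_heal(instances):
        for instance in instances:
            if instance['vm_state'] == BUILDING:
                print('[instance: %s] Skipping network cache update for '
                      'instance because it is Building.' % instance['uuid'])
                continue
            return instance
        print("Didn't find any instances for network info cache update.")
        return None

    pick_instance_to_heal([
        {'uuid': '540af840-eba5-4cee-a37c-6d6809a24f95', 'vm_state': BUILDING},
        {'uuid': '3a61955c-d6df-4024-bc41-b1100a89fd7f', 'vm_state': BUILDING},
    ])

With every instance on the host still building, the task falls through to the "Didn't find any instances" record seen below.
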
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.762794] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.763021] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1152.763275] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1153.737706] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.749272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.749516] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.749698] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.749856] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1153.751526] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29973ed1-c850-4310-a035-a2eb80f32113 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.760518] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e77cc5-58d2-4761-b78d-4dd789b879c2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.775934] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3595cc8d-c93e-45f5-a27f-3e2638c711a2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.783136] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f457331-6001-41a2-b263-3245f271eda4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.814223] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180552MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1153.814416] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.815052] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.890683] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 540af840-eba5-4cee-a37c-6d6809a24f95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.890889] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891067] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891232] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891403] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891560] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891710] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.891859] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.892016] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.892180] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1153.903825] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 77b49a77-2048-4812-93bc-aba06586d2a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.915091] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8ab13896-dd97-47cc-8013-9fe9dc791ef6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.926467] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 699d4cca-99b8-4517-957b-949afe791aed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.936899] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.947744] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 7f22463d-9e8c-4d5b-b30e-86654f34b633 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.962419] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8f51fc3b-205b-41cb-bc95-1f0e694dda76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1153.973432] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ffe28344-6909-4252-b899-4a2d66b1d6df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.011088] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 234808e0-4e10-4209-96c0-fa61fe2cdbe3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.024864] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance de6f4f4c-b07a-437e-b01b-e7a7b600fc25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.035492] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 81f8a8a0-9897-424e-aaa7-02e902b996d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.047537] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.059247] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6a4c0163-c6e3-406d-bcb4-5baf627433e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.071493] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 87a01d32-4dcc-4e97-a39c-d48c146c18fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.082904] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.094224] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.105286] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
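
The per-instance allocations in the surrounding records reconcile exactly with the "Final resource view" reported just after them: ten active or building instances, each holding 1 GB disk, 128 MB RAM and 1 vCPU, with Nova folding the reserved host memory into used_ram. The arithmetic, assuming the default reserved_host_memory_mb of 512:

    # Reconciling the final resource view with the allocations listed above.
    instances = 10
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    reserved_memory_mb = 512  # assumed Nova default, not stated in the log

    used_ram = reserved_memory_mb + instances * per_instance['MEMORY_MB']
    used_disk = instances * per_instance['DISK_GB']
    used_vcpus = instances * per_instance['VCPU']

    # Matches used_ram=1792MB, used_disk=10GB, used_vcpus=10 in the log:
    assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)
    print('used_ram=%dMB used_disk=%dGB used_vcpus=%d'
          % (used_ram, used_disk, used_vcpus))

The instances that were only scheduled here (the "Skipping heal of allocation" records) hold placement allocations but do not yet count toward the hypervisor-side usage.
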
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1154.105536] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1154.105856] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '49', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_976763dbb98a4b04a9cda2b0a5482452': '1', 'io_workload': '10', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_task_spawning': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1154.431313] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1990ffe6-6b8f-4c95-adc4-7c533b975244 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.439456] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e39ad28-1078-417c-80fc-58a1058432f7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.470806] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea004e0-4d72-4482-a573-7f5ee0e07770 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.479730] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44deb33e-c11b-4677-af69-ea9c3492c8f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.493627] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.502286] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.517200] env[62730]: DEBUG 
nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1154.517200] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.702s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.516814] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.737220] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.737522] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1166.212227] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.019247] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f5aca42-0ea5-4ac3-a208-1d8e40fb9005 tempest-ServersAaction247Test-1557020538 tempest-ServersAaction247Test-1557020538-project-member] Acquiring lock "175517cd-b112-4aa4-87e0-e74c1d9a07fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.019579] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f5aca42-0ea5-4ac3-a208-1d8e40fb9005 tempest-ServersAaction247Test-1557020538 tempest-ServersAaction247Test-1557020538-project-member] Lock "175517cd-b112-4aa4-87e0-e74c1d9a07fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.174444] env[62730]: WARNING oslo_vmware.rw_handles [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1178.174444]
env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1178.174444] env[62730]: ERROR oslo_vmware.rw_handles [ 1178.175023] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1178.177759] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1178.178103] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Copying Virtual Disk [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/338df372-bf3e-4956-b4f0-bc8db7cc0897/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1178.178409] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f05225b-9810-4b93-894e-cd305f350599 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.187889] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for the task: (returnval){ [ 1178.187889] env[62730]: value = "task-4837158" [ 1178.187889] env[62730]: _type = "Task" [ 1178.187889] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.197224] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Task: {'id': task-4837158, 'name': CopyVirtualDisk_Task} progress is 0%. 
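
The records above and below show the image-cache miss path of the VMware driver: the Glance image is first streamed to a per-fetch vmware_temp directory as tmp-sparse.vmdk, CopyVirtualDisk_Task then converts it to a flat VMDK alongside it, and the finished folder is parked under devstack-image-cache_base for later spawns to clone from. A sketch of just the datastore-path bookkeeping (the helper and names are illustrative, not Nova's API):

    import posixpath
    import uuid

    def image_cache_paths(datastore, image_id):
        # One random upload directory per fetch, like the log's
        # vmware_temp/338df372-... path (the UUID here is freshly made up).
        tmp_dir = posixpath.join('vmware_temp', str(uuid.uuid4()), image_id)
        tmp_sparse = posixpath.join(tmp_dir, 'tmp-sparse.vmdk')
        tmp_flat = posixpath.join(tmp_dir, image_id + '.vmdk')
        cached = posixpath.join('devstack-image-cache_base', image_id,
                                image_id + '.vmdk')
        return tuple('[%s] %s' % (datastore, p)
                     for p in (tmp_sparse, tmp_flat, cached))

    sparse, flat, final = image_cache_paths(
        'datastore2', 'a46adab9-3ef5-4b2e-8d44-bab77576ed71')
    # 1. Glance data is streamed to `sparse`; 2. CopyVirtualDisk_Task
    # produces `flat`; 3. the folder is moved so the disk ends up at
    # `final`, the cached copy that later spawns reuse.
    print(sparse)
    print(flat)
    print(final)

The per-image lock on the cached .vmdk path (seen being acquired and released throughout the log) is what keeps concurrent spawns from racing on steps 2 and 3.
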
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.698684] env[62730]: DEBUG oslo_vmware.exceptions [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1178.698969] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.699552] env[62730]: ERROR nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1178.699552] env[62730]: Faults: ['InvalidArgument'] [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Traceback (most recent call last): [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] yield resources [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self.driver.spawn(context, instance, image_meta, [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self._fetch_image_if_missing(context, vi) [ 1178.699552] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] image_cache(vi, tmp_image_ds_loc) [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] vm_util.copy_virtual_disk( [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] session._wait_for_task(vmdk_copy_task) [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return self.wait_for_task(task_ref) [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return evt.wait() [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] result = hub.switch() [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1178.699965] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return self.greenlet.switch() [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self.f(*self.args, **self.kw) [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] raise exceptions.translate_fault(task_info.error) [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Faults: ['InvalidArgument'] [ 1178.700404] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] [ 1178.700404] env[62730]: INFO nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Terminating instance [ 1178.701615] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.701836] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 
tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.702469] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1178.702665] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1178.702889] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1302f1d-0018-4a02-9359-c76222bded17 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.706041] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8054c2-3ee5-4742-986c-8ba27e499215 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.713123] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1178.713371] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d65b54e2-f780-4458-8f3b-f7dc543a5ed5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.715810] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.715989] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1178.716978] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28fe26b-53c3-4bc0-847f-d2183b0cbdda {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.721984] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for the task: (returnval){ [ 1178.721984] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525a4292-199d-af4c-7870-94171c508904" [ 1178.721984] env[62730]: _type = "Task" [ 1178.721984] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.736037] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525a4292-199d-af4c-7870-94171c508904, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.784364] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1178.784787] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1178.785014] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Deleting the datastore file [datastore2] 540af840-eba5-4cee-a37c-6d6809a24f95 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.785325] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e786aa72-05ec-43fa-8ca0-9f115e3f4719 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.791941] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for the task: (returnval){ [ 1178.791941] env[62730]: value = "task-4837160" [ 1178.791941] env[62730]: _type = "Task" [ 1178.791941] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.800346] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Task: {'id': task-4837160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.232976] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1179.233267] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Creating directory with path [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1179.233514] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e7f1edc-f9e6-4cca-b341-bc92e2d30885 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.246249] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Created directory with path [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1179.246484] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Fetch image to [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1179.246657] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1179.247466] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6b3e5b-dcac-4e46-95d8-d61936d7032d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.254529] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553a0b6b-97bc-4193-a01c-30a673fe6fe7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.263629] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9390f7-ad56-4078-9a1e-88313f233e41 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.298720] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-40058e47-e096-4042-816b-b78cc1780411 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.306685] env[62730]: DEBUG oslo_vmware.api [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Task: {'id': task-4837160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070456} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.308263] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1179.308462] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1179.308641] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1179.308820] env[62730]: INFO nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Took 0.61 seconds to destroy the instance on the hypervisor. 
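The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same shape: invoke an asynchronous vCenter task, then poll its info until it reaches 'success' or 'error', translating a VIM fault such as InvalidArgument ("A specified parameter was not correct: fileType") into a Python exception. A minimal sketch of that poll loop follows, assuming a session object with a get_task_info helper; this is illustrative only, not the oslo.vmware implementation.

    import time

    class TaskFailed(Exception):
        """Raised when a vCenter task finishes in the 'error' state."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Sketch of the poll loop visible in the log (wait_for_task /
        # _poll_task). `session.get_task_info` is an assumed helper that
        # wraps a PropertyCollector read of the Task object's `info`.
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The real code translates the VIM fault (here
                # InvalidArgument on fileType) into an oslo_vmware
                # exception before raising it.
                raise TaskFailed(info.error.localizedMessage)
            # 'queued' / 'running': log progress and retry, as the
            # "progress is 0%" records above do.
            time.sleep(poll_interval)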
[ 1179.310702] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bd67fa7-1477-4644-a45e-172d325602ab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.313511] env[62730]: DEBUG nova.compute.claims [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1179.313697] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.313915] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.334097] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1179.525597] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1179.588320] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1179.588528] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1179.731465] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe24d34e-312f-43aa-a8ce-121e098b9522 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.740023] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e006a203-0a99-4810-8dbd-eae49aaa0d92 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.769740] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985a424e-2084-4e05-acd4-b6f2894a0fa2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.777883] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f78b35-3607-47b2-9fb1-8620fa4135ab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.791122] env[62730]: DEBUG nova.compute.provider_tree [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.801752] env[62730]: DEBUG nova.scheduler.client.report [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1179.817267] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.503s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.817836] env[62730]: ERROR nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1179.817836] env[62730]: Faults: ['InvalidArgument'] [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Traceback (most recent call last): [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1179.817836] env[62730]: ERROR 
nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self.driver.spawn(context, instance, image_meta, [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self._fetch_image_if_missing(context, vi) [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] image_cache(vi, tmp_image_ds_loc) [ 1179.817836] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] vm_util.copy_virtual_disk( [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] session._wait_for_task(vmdk_copy_task) [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return self.wait_for_task(task_ref) [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return evt.wait() [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] result = hub.switch() [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] return self.greenlet.switch() [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1179.818227] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] self.f(*self.args, **self.kw) [ 1179.818576] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1179.818576] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] raise exceptions.translate_fault(task_info.error) [ 1179.818576] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1179.818576] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Faults: ['InvalidArgument'] [ 1179.818576] env[62730]: ERROR nova.compute.manager [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] [ 1179.818576] env[62730]: DEBUG nova.compute.utils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1179.820357] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Build of instance 540af840-eba5-4cee-a37c-6d6809a24f95 was re-scheduled: A specified parameter was not correct: fileType [ 1179.820357] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1179.820750] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1179.820924] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1179.821116] env[62730]: DEBUG nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1179.821289] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1180.636505] env[62730]: DEBUG nova.network.neutron [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.648977] env[62730]: INFO nova.compute.manager [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Took 0.83 seconds to deallocate network for instance. [ 1180.765951] env[62730]: INFO nova.scheduler.client.report [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Deleted allocations for instance 540af840-eba5-4cee-a37c-6d6809a24f95 [ 1180.791822] env[62730]: DEBUG oslo_concurrency.lockutils [None req-99ebdfae-6ebe-49df-a905-6ae588175a76 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 614.143s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.793087] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.614s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.793318] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Acquiring lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.793529] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.793712] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.795788] env[62730]: INFO nova.compute.manager [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Terminating instance [ 1180.798112] env[62730]: DEBUG nova.compute.manager [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1180.798345] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1180.799095] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf604f1a-5ab1-4b07-8524-08c27d04b5b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.808975] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb8a1c6-ed3c-4653-ace2-1610b2b4a001 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.820405] env[62730]: DEBUG nova.compute.manager [None req-c66a379c-202b-47c2-b203-26231f26fb35 tempest-ServersV294TestFqdnHostnames-1731655541 tempest-ServersV294TestFqdnHostnames-1731655541-project-member] [instance: 77b49a77-2048-4812-93bc-aba06586d2a2] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1180.843409] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 540af840-eba5-4cee-a37c-6d6809a24f95 could not be found. [ 1180.843598] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1180.843794] env[62730]: INFO nova.compute.manager [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Took 0.05 seconds to destroy the instance on the hypervisor. 
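The lockutils records above show per-instance serialization: build, terminate, and power-state sync all contend on a lock named after the instance UUID, which is why do_terminate_instance reports having waited 414.614s for the lock that the failed build held. A minimal sketch of the pattern, using the public oslo.concurrency context manager rather than Nova's own wrapper:

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # Any other thread locking the same name blocks here until the
        # holder exits the block, producing the "acquired ... waited Ns"
        # and "released ... held Ns" pairs seen in the log.
        with lockutils.lock(instance_uuid):
            pass  # teardown work happens while the lock is held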
[ 1180.844068] env[62730]: DEBUG oslo.service.loopingcall [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.844419] env[62730]: DEBUG nova.compute.manager [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1180.844554] env[62730]: DEBUG nova.network.neutron [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1180.847270] env[62730]: DEBUG nova.compute.manager [None req-c66a379c-202b-47c2-b203-26231f26fb35 tempest-ServersV294TestFqdnHostnames-1731655541 tempest-ServersV294TestFqdnHostnames-1731655541-project-member] [instance: 77b49a77-2048-4812-93bc-aba06586d2a2] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1180.870831] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c66a379c-202b-47c2-b203-26231f26fb35 tempest-ServersV294TestFqdnHostnames-1731655541 tempest-ServersV294TestFqdnHostnames-1731655541-project-member] Lock "77b49a77-2048-4812-93bc-aba06586d2a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.467s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.873628] env[62730]: DEBUG nova.network.neutron [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.879774] env[62730]: DEBUG nova.compute.manager [None req-96dbf19d-3fa6-426a-8698-ba570676fead tempest-ServerDiagnosticsTest-42425005 tempest-ServerDiagnosticsTest-42425005-project-member] [instance: 8ab13896-dd97-47cc-8013-9fe9dc791ef6] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1180.882429] env[62730]: INFO nova.compute.manager [-] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] Took 0.04 seconds to deallocate network for instance. [ 1180.902931] env[62730]: DEBUG nova.compute.manager [None req-96dbf19d-3fa6-426a-8698-ba570676fead tempest-ServerDiagnosticsTest-42425005 tempest-ServerDiagnosticsTest-42425005-project-member] [instance: 8ab13896-dd97-47cc-8013-9fe9dc791ef6] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1180.927666] env[62730]: DEBUG oslo_concurrency.lockutils [None req-96dbf19d-3fa6-426a-8698-ba570676fead tempest-ServerDiagnosticsTest-42425005 tempest-ServerDiagnosticsTest-42425005-project-member] Lock "8ab13896-dd97-47cc-8013-9fe9dc791ef6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.452s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.937560] env[62730]: DEBUG nova.compute.manager [None req-2e995aa2-8e1c-4a72-9516-9a28e9097f05 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 699d4cca-99b8-4517-957b-949afe791aed] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1180.964035] env[62730]: DEBUG nova.compute.manager [None req-2e995aa2-8e1c-4a72-9516-9a28e9097f05 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 699d4cca-99b8-4517-957b-949afe791aed] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1180.979644] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c1c85557-a19d-42a2-9e4d-559747bd5083 tempest-ServersTestManualDisk-1605605198 tempest-ServersTestManualDisk-1605605198-project-member] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.980839] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 86.930s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.981055] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 540af840-eba5-4cee-a37c-6d6809a24f95] During sync_power_state the instance has a pending task (deleting). Skip. 
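The _sync_power_states record above shows the periodic sync deliberately skipping an instance whose task_state is 'deleting'. A sketch of that guard follows; instance.task_state matches the Nova object model, but the function body itself is an illustrative reconstruction, not Nova's code.

    import logging

    LOG = logging.getLogger(__name__)

    def query_driver_power_state_and_sync(instance):
        # If another operation is already in flight (here a delete), the
        # sync backs off rather than racing it, as logged above.
        if instance.task_state is not None:
            LOG.info("During sync_power_state the instance has a pending "
                     "task (%s). Skip.", instance.task_state)
            return
        # ...otherwise compare the driver's power state with the DB
        # record and reconcile (omitted).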
[ 1180.981249] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "540af840-eba5-4cee-a37c-6d6809a24f95" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.989178] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e995aa2-8e1c-4a72-9516-9a28e9097f05 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "699d4cca-99b8-4517-957b-949afe791aed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.010s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.998569] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1181.065883] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.066228] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.067796] env[62730]: INFO nova.compute.claims [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1181.477075] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6e147e-b2fa-4e8d-b5dc-076e2fd5e063 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.485043] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40d7308-d52b-4996-81e5-05b237b91b56 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.514978] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2ca0ad-722b-4af7-b4b1-5392d78e17a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.523202] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17340ac-8a5f-43ad-8da1-c7e3efabbb3c {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.537062] env[62730]: DEBUG nova.compute.provider_tree [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.546540] env[62730]: DEBUG nova.scheduler.client.report [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1181.564735] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.498s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.565279] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1181.631942] env[62730]: DEBUG nova.compute.utils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1181.633396] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1181.633579] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1181.646895] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1181.730644] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1181.755540] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1181.756703] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1181.756703] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.756703] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1181.756703] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.756703] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1181.756911] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1181.756946] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1181.757252] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1181.757456] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1181.757685] env[62730]: DEBUG nova.virt.hardware [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1181.758575] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1aec0a-2f19-42bf-ba66-a1c295a95b03 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.769062] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794a6002-20fb-4b74-9783-ced00405e60d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.793265] env[62730]: DEBUG nova.policy [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff834274d6ba4264a641bdb67bb14808', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9992614978224ad7bd8ed947a0cf69bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1182.200835] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Successfully created port: 06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1184.036567] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock 
"9c36edef-9792-4f26-88c0-94a07eb1f588" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.036567] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.036567] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Successfully updated port: 06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.053025] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.053025] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquired lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.053025] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1184.150200] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1184.166208] env[62730]: DEBUG nova.compute.manager [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Received event network-vif-plugged-06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1184.166208] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] Acquiring lock "435af367-8af8-4e07-b96a-923d32cc645e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.166208] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] Lock "435af367-8af8-4e07-b96a-923d32cc645e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.166208] env[62730]: DEBUG oslo_concurrency.lockutils [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] Lock "435af367-8af8-4e07-b96a-923d32cc645e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.166356] env[62730]: DEBUG nova.compute.manager [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] No waiting events found dispatching network-vif-plugged-06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1184.166961] env[62730]: WARNING nova.compute.manager [req-ba066a7f-7c2c-40ad-bb2f-badaad064cb6 req-b1a0d220-9f63-456f-90c0-55978a1582a3 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Received unexpected event network-vif-plugged-06e8f193-4674-4238-9b86-eef19a100040 for instance with vm_state building and task_state spawning. 
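The event records above trace the network-vif-plugged handshake: the compute manager can register a waiter for a named per-instance event, and the Neutron notification pops it; when no waiter is registered, as here while the instance is still spawning, the event is logged as unexpected and dropped. A self-contained toy model of that dispatch (the _events registry and both helpers are inventions for illustration):

    import threading

    _events = {}

    def prepare_for_event(instance_uuid, event_name):
        # Register interest before the event is needed.
        evt = threading.Event()
        _events[(instance_uuid, event_name)] = evt
        return evt

    def pop_instance_event(instance_uuid, event_name):
        evt = _events.pop((instance_uuid, event_name), None)
        if evt is None:
            return False  # -> "No waiting events found dispatching ..."
        evt.set()          # wake the thread blocked on evt.wait()
        return True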
[ 1184.517610] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Updating instance_info_cache with network_info: [{"id": "06e8f193-4674-4238-9b86-eef19a100040", "address": "fa:16:3e:49:08:13", "network": {"id": "90078078-eec6-4b4e-92ad-9c315af8581f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-650905529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9992614978224ad7bd8ed947a0cf69bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e8f193-46", "ovs_interfaceid": "06e8f193-4674-4238-9b86-eef19a100040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.535012] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Releasing lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.535407] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance network_info: |[{"id": "06e8f193-4674-4238-9b86-eef19a100040", "address": "fa:16:3e:49:08:13", "network": {"id": "90078078-eec6-4b4e-92ad-9c315af8581f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-650905529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9992614978224ad7bd8ed947a0cf69bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e8f193-46", "ovs_interfaceid": "06e8f193-4674-4238-9b86-eef19a100040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1184.536101] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:08:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06e8f193-4674-4238-9b86-eef19a100040', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1184.544550] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Creating folder: Project (9992614978224ad7bd8ed947a0cf69bc). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1184.545668] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37306c1d-fc8d-4518-a303-659a5d0c9db4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.558948] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Created folder: Project (9992614978224ad7bd8ed947a0cf69bc) in parent group-v942928. [ 1184.559287] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Creating folder: Instances. Parent ref: group-v942993. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1184.560065] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86a02011-dfaf-4e47-8c6b-f2d66c548b43 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.574090] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Created folder: Instances in parent group-v942993. [ 1184.574374] env[62730]: DEBUG oslo.service.loopingcall [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1184.574796] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1184.575084] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-482ba76c-dd4b-4a19-a199-46bb70044e30 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.597931] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1184.597931] env[62730]: value = "task-4837163" [ 1184.597931] env[62730]: _type = "Task" [ 1184.597931] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.607410] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837163, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.108605] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837163, 'name': CreateVM_Task, 'duration_secs': 0.323193} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.108858] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1185.127186] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.127186] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.127186] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1185.127186] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50c82c9d-ea8c-424c-8e20-4ee17747e6ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.132505] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for the task: (returnval){ [ 1185.132505] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5226c1ea-ade2-cd8c-1b0b-ade557908e21" [ 1185.132505] env[62730]: _type = 
"Task" [ 1185.132505] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.142102] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5226c1ea-ade2-cd8c-1b0b-ade557908e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.289466] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a9e2d498-536a-47b9-9670-523b109063a6 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.289882] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a9e2d498-536a-47b9-9670-523b109063a6 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.644959] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.644959] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1185.645287] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.190085] env[62730]: DEBUG nova.compute.manager [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Received event network-changed-06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1186.190424] env[62730]: DEBUG nova.compute.manager [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Refreshing instance network info cache due to event network-changed-06e8f193-4674-4238-9b86-eef19a100040. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1186.190591] env[62730]: DEBUG oslo_concurrency.lockutils [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] Acquiring lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.190687] env[62730]: DEBUG oslo_concurrency.lockutils [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] Acquired lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.190867] env[62730]: DEBUG nova.network.neutron [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Refreshing network info cache for port 06e8f193-4674-4238-9b86-eef19a100040 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1186.605638] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "435af367-8af8-4e07-b96a-923d32cc645e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.684415] env[62730]: DEBUG nova.network.neutron [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Updated VIF entry in instance network info cache for port 06e8f193-4674-4238-9b86-eef19a100040.
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1186.684825] env[62730]: DEBUG nova.network.neutron [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Updating instance_info_cache with network_info: [{"id": "06e8f193-4674-4238-9b86-eef19a100040", "address": "fa:16:3e:49:08:13", "network": {"id": "90078078-eec6-4b4e-92ad-9c315af8581f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-650905529-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9992614978224ad7bd8ed947a0cf69bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06e8f193-46", "ovs_interfaceid": "06e8f193-4674-4238-9b86-eef19a100040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.695589] env[62730]: DEBUG oslo_concurrency.lockutils [req-f01f04f2-fbfb-4c80-9578-2b53162852f5 req-3a06f71c-2e26-48f2-915a-5a532d2a9ef8 service nova] Releasing lock "refresh_cache-435af367-8af8-4e07-b96a-923d32cc645e" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.743630] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d4987e11-0b5e-4331-9378-00c1af7e24b2 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "07bb9890-0ebe-4ce3-98b9-2fe35a9a6796" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.743991] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d4987e11-0b5e-4331-9378-00c1af7e24b2 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "07bb9890-0ebe-4ce3-98b9-2fe35a9a6796" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.817194] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6e18dc7e-cc53-4a77-a713-45e7c5102144 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Acquiring lock "be7f1a05-96f9-430c-b5ad-13fa1aae685b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.817469] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6e18dc7e-cc53-4a77-a713-45e7c5102144 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock
"be7f1a05-96f9-430c-b5ad-13fa1aae685b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.738290] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.734224] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.737917] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.738588] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.732804] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.736511] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.738597] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.738912] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1213.738912] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1213.760042] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760229] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760347] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760476] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760601] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760724] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760845] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.760964] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.761096] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.761238] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1213.761401] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1214.737639] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.749334] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.749630] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.749849] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.749888] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1214.751040] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ae6529-f276-41cd-bd09-66f528c8f92d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.760241] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19078dcf-57c8-48df-96fe-8bf7122a164c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.774951] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67436ceb-23e2-4694-b1ea-ab318878c02b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.782558] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86eb9196-7de6-4384-bdd2-02ed5b11be04 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.811657] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180487MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1214.811822] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1214.812019] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.889571] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.889732] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.889861] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.889983] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890127] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890250] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890367] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890482] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890598] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.890709] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1214.902483] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance de6f4f4c-b07a-437e-b01b-e7a7b600fc25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.913381] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 81f8a8a0-9897-424e-aaa7-02e902b996d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.923537] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.934138] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6a4c0163-c6e3-406d-bcb4-5baf627433e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.943881] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 87a01d32-4dcc-4e97-a39c-d48c146c18fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.955265] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.965470] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.977063] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1214.988072] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 175517cd-b112-4aa4-87e0-e74c1d9a07fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1215.023844] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1215.036039] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1215.046793] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1215.057850] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance be7f1a05-96f9-430c-b5ad-13fa1aae685b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1215.058163] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.058285] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '53', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_b9a9c0281e6f463aab4a2f5fcb1019a1': '1', 'io_workload': '10', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1215.328646] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ccadf0-38b3-462e-b139-9d70268091f7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.336485] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50af2b7-ee8e-4167-a835-1184457957be {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.365697] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70f5a4f-929a-4417-9055-5ac54435cf28 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.373096] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1921f970-7af1-4ba2-88b0-8d56f602153e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.387163] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.395839] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.410746] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1215.410970] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.599s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.410846] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.531286] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8f30016e-2035-4661-9031-4df05342c015 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "c0ada899-0ddb-456a-a1f3-097529654318" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.531737] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8f30016e-2035-4661-9031-4df05342c015 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c0ada899-0ddb-456a-a1f3-097529654318" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.064100] env[62730]: DEBUG oslo_concurrency.lockutils [None req-762028c6-c8c5-45fc-928c-3a23315267e5 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Acquiring lock "f5f9fdc8-ca89-438e-a710-b3e1dd85f550" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.064351] env[62730]: DEBUG oslo_concurrency.lockutils [None req-762028c6-c8c5-45fc-928c-3a23315267e5 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "f5f9fdc8-ca89-438e-a710-b3e1dd85f550" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.737308] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.737792] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1228.194766] env[62730]: WARNING oslo_vmware.rw_handles [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1228.194766] env[62730]: ERROR oslo_vmware.rw_handles [ 1228.195466] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1228.198204] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1228.198204] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Copying Virtual Disk [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/74fd920b-de2d-4aeb-a2c0-6d712fd37519/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1228.198204] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4c12b5a-8fda-45b0-9ab1-7d9f0129e13a {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.206779] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for the task: (returnval){ [ 1228.206779] env[62730]: value = "task-4837164" [ 1228.206779] env[62730]: _type = "Task" [ 1228.206779] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.215602] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Task: {'id': task-4837164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.718143] env[62730]: DEBUG oslo_vmware.exceptions [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1228.718468] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.719059] env[62730]: ERROR nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1228.719059] env[62730]: Faults: ['InvalidArgument'] [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Traceback (most recent call last): [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] yield resources [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self.driver.spawn(context, instance, image_meta, [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 
986e37d4-d3ae-42a0-8caa-39b92636b973] self._fetch_image_if_missing(context, vi) [ 1228.719059] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] image_cache(vi, tmp_image_ds_loc) [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] vm_util.copy_virtual_disk( [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] session._wait_for_task(vmdk_copy_task) [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return self.wait_for_task(task_ref) [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return evt.wait() [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] result = hub.switch() [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1228.719500] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return self.greenlet.switch() [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self.f(*self.args, **self.kw) [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] raise exceptions.translate_fault(task_info.error) [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Faults: ['InvalidArgument'] [ 1228.719966] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] [ 1228.719966] env[62730]: INFO nova.compute.manager 
[None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Terminating instance [ 1228.722189] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.722189] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1228.722481] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1228.722651] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1228.722903] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6db91a68-7117-48ba-bf33-5e81ba6cca77 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.725716] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5238fa92-dfd0-4e4d-aea8-010e44f21db4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.733951] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1228.735370] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c84106bc-9b19-4c65-85cd-991d1f32d42e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.736986] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1228.737190] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1228.738097] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3056405-0b97-4bf0-8cb2-c215500d3125 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.744098] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for the task: (returnval){ [ 1228.744098] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]523bb3fe-7d33-9f82-5598-adc3ab1328e4" [ 1228.744098] env[62730]: _type = "Task" [ 1228.744098] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.759080] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]523bb3fe-7d33-9f82-5598-adc3ab1328e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.819428] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1228.819676] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1228.819912] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Deleting the datastore file [datastore2] 986e37d4-d3ae-42a0-8caa-39b92636b973 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1228.820195] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77514d56-63f7-4480-8a9c-ec46a2b53386 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.841324] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for the task: (returnval){ [ 1228.841324] env[62730]: value = "task-4837166" [ 1228.841324] env[62730]: _type = "Task" [ 1228.841324] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.852650] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Task: {'id': task-4837166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.254777] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1229.255171] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Creating directory with path [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1229.255318] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3709799-70a7-481a-9478-25db19bf9f89 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.269211] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Created directory with path [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1229.269431] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Fetch image to [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1229.269610] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1229.270428] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b572e97-36be-462a-8040-3ea4713d7b15 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.277945] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1eff988-197d-4276-933b-3eb8b8cd1d19 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.287927] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2925ef-082e-4d9f-9b98-b30d94a30aa5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.321113] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2defe1b-b7c4-4699-bccf-0360dc0fd9c5 
{{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.327774] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4a9738c4-8825-49b6-a2be-44e26233659b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.349210] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1229.354700] env[62730]: DEBUG oslo_vmware.api [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Task: {'id': task-4837166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075821} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.354987] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1229.355199] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1229.355391] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1229.355575] env[62730]: INFO nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1229.358916] env[62730]: DEBUG nova.compute.claims [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1229.359109] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.359347] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.407949] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1229.469526] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1229.469767] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1229.763436] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4733eb8d-aabb-4d6f-8733-cc2ee08ee8ca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.771953] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9284fe3f-010b-485b-ab8b-b181b94fd079 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.803895] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b07a4cc-305b-4eb8-9fbb-38065ccac8eb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.812638] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c1b6d6-6489-4ee2-a418-5a37340fff52 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.827675] env[62730]: DEBUG nova.compute.provider_tree [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.837979] env[62730]: DEBUG nova.scheduler.client.report [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.856531] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.497s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.857142] env[62730]: ERROR nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1229.857142] env[62730]: Faults: ['InvalidArgument'] [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Traceback (most recent call last): [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self.driver.spawn(context, instance, image_meta, [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self._fetch_image_if_missing(context, vi) [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] image_cache(vi, tmp_image_ds_loc) [ 1229.857142] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] vm_util.copy_virtual_disk( [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] session._wait_for_task(vmdk_copy_task) [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return self.wait_for_task(task_ref) [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return evt.wait() [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] result = hub.switch() [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] return self.greenlet.switch() [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1229.857969] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] self.f(*self.args, **self.kw) [ 1229.858622] env[62730]: ERROR nova.compute.manager [instance: 
986e37d4-d3ae-42a0-8caa-39b92636b973] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1229.858622] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] raise exceptions.translate_fault(task_info.error) [ 1229.858622] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1229.858622] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Faults: ['InvalidArgument'] [ 1229.858622] env[62730]: ERROR nova.compute.manager [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] [ 1229.858622] env[62730]: DEBUG nova.compute.utils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1229.860033] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Build of instance 986e37d4-d3ae-42a0-8caa-39b92636b973 was re-scheduled: A specified parameter was not correct: fileType [ 1229.860033] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1229.860033] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1229.860216] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1229.860300] env[62730]: DEBUG nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1229.860512] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1230.265813] env[62730]: DEBUG nova.network.neutron [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.284105] env[62730]: INFO nova.compute.manager [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Took 0.42 seconds to deallocate network for instance. [ 1230.396036] env[62730]: INFO nova.scheduler.client.report [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Deleted allocations for instance 986e37d4-d3ae-42a0-8caa-39b92636b973 [ 1230.418515] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e54725ec-7aa3-4651-acbf-47dfcb0b7864 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 660.538s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.423037] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 460.400s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.423037] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Acquiring lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.423037] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.423305] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.423751] env[62730]: INFO nova.compute.manager [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Terminating instance [ 1230.425600] env[62730]: DEBUG nova.compute.manager [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1230.425789] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1230.427073] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7dba7f3-d49e-4d01-ae4a-f7b57565eecd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.433581] env[62730]: DEBUG nova.compute.manager [None req-5b2d8d30-2b7e-41f0-a7d8-0d91f5e1e473 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 7f22463d-9e8c-4d5b-b30e-86654f34b633] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.441614] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5c88bf-3932-42cb-b22a-ca9389e35b1c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.461207] env[62730]: DEBUG nova.compute.manager [None req-5b2d8d30-2b7e-41f0-a7d8-0d91f5e1e473 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 7f22463d-9e8c-4d5b-b30e-86654f34b633] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.477823] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 986e37d4-d3ae-42a0-8caa-39b92636b973 could not be found. 
[ 1230.478062] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1230.478249] env[62730]: INFO nova.compute.manager [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1230.478557] env[62730]: DEBUG oslo.service.loopingcall [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1230.480898] env[62730]: DEBUG nova.compute.manager [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1230.481032] env[62730]: DEBUG nova.network.neutron [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1230.491943] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5b2d8d30-2b7e-41f0-a7d8-0d91f5e1e473 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "7f22463d-9e8c-4d5b-b30e-86654f34b633" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.614s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.503352] env[62730]: DEBUG nova.compute.manager [None req-e1beb23e-4933-4a59-83fe-c27c4be1767c tempest-ServerShowV254Test-1052167189 tempest-ServerShowV254Test-1052167189-project-member] [instance: 8f51fc3b-205b-41cb-bc95-1f0e694dda76] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.527905] env[62730]: DEBUG nova.compute.manager [None req-e1beb23e-4933-4a59-83fe-c27c4be1767c tempest-ServerShowV254Test-1052167189 tempest-ServerShowV254Test-1052167189-project-member] [instance: 8f51fc3b-205b-41cb-bc95-1f0e694dda76] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.535686] env[62730]: DEBUG nova.network.neutron [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.549673] env[62730]: INFO nova.compute.manager [-] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] Took 0.07 seconds to deallocate network for instance. 
[ 1230.566989] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e1beb23e-4933-4a59-83fe-c27c4be1767c tempest-ServerShowV254Test-1052167189 tempest-ServerShowV254Test-1052167189-project-member] Lock "8f51fc3b-205b-41cb-bc95-1f0e694dda76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.382s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.602602] env[62730]: DEBUG nova.compute.manager [None req-6485cd2a-b3a3-46ec-a981-0cd7879ffc1d tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: ffe28344-6909-4252-b899-4a2d66b1d6df] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.639165] env[62730]: DEBUG nova.compute.manager [None req-6485cd2a-b3a3-46ec-a981-0cd7879ffc1d tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: ffe28344-6909-4252-b899-4a2d66b1d6df] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.669489] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6485cd2a-b3a3-46ec-a981-0cd7879ffc1d tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "ffe28344-6909-4252-b899-4a2d66b1d6df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.979s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.689527] env[62730]: DEBUG nova.compute.manager [None req-8a8bbdd7-53ce-4fcd-abeb-b7a5757510f8 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: 234808e0-4e10-4209-96c0-fa61fe2cdbe3] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.721072] env[62730]: DEBUG nova.compute.manager [None req-8a8bbdd7-53ce-4fcd-abeb-b7a5757510f8 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: 234808e0-4e10-4209-96c0-fa61fe2cdbe3] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.729017] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ab9f6023-7d4f-4a94-be5c-89620e40d4a1 tempest-VolumesAssistedSnapshotsTest-538676494 tempest-VolumesAssistedSnapshotsTest-538676494-project-member] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.309s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.729919] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 136.679s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.730103] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 986e37d4-d3ae-42a0-8caa-39b92636b973] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1230.730988] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "986e37d4-d3ae-42a0-8caa-39b92636b973" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.743986] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8a8bbdd7-53ce-4fcd-abeb-b7a5757510f8 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock "234808e0-4e10-4209-96c0-fa61fe2cdbe3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.091s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.754499] env[62730]: DEBUG nova.compute.manager [None req-1e738207-f61d-4568-b768-7d5014c26714 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: de6f4f4c-b07a-437e-b01b-e7a7b600fc25] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.782661] env[62730]: DEBUG nova.compute.manager [None req-1e738207-f61d-4568-b768-7d5014c26714 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: de6f4f4c-b07a-437e-b01b-e7a7b600fc25] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.804334] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1e738207-f61d-4568-b768-7d5014c26714 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "de6f4f4c-b07a-437e-b01b-e7a7b600fc25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.372s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.813959] env[62730]: DEBUG nova.compute.manager [None req-901d04a0-d539-42bc-b91d-28859c8f67f3 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 81f8a8a0-9897-424e-aaa7-02e902b996d9] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.837678] env[62730]: DEBUG nova.compute.manager [None req-901d04a0-d539-42bc-b91d-28859c8f67f3 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 81f8a8a0-9897-424e-aaa7-02e902b996d9] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1230.858945] env[62730]: DEBUG oslo_concurrency.lockutils [None req-901d04a0-d539-42bc-b91d-28859c8f67f3 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "81f8a8a0-9897-424e-aaa7-02e902b996d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.953s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.869218] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1230.924069] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.924349] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.926046] env[62730]: INFO nova.compute.claims [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.268620] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885299e2-4b2b-4216-afdc-12101fe715d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.276897] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32109eb3-04b1-4202-b924-9a3a58617e31 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.307949] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9c638c-a8d4-4751-b026-8bc302997b08 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.316043] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf8f721-95a9-4695-bfdb-826b72d7bcbd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.331415] env[62730]: DEBUG nova.compute.provider_tree [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.340565] env[62730]: 
DEBUG nova.scheduler.client.report [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1231.356529] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.432s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.357035] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1231.391711] env[62730]: DEBUG nova.compute.utils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1231.393706] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Not allocating networking since 'none' was specified. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1231.404215] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1231.461304] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.480025] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1231.507481] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1231.507779] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1231.507941] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.508140] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1231.508289] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.508458] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1231.508716] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1231.508883] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1231.509076] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 
tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1231.509507] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1231.509507] env[62730]: DEBUG nova.virt.hardware [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.510312] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205a4d67-b3e7-4b27-ab7a-73a1d932f287 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.518812] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842f9fcf-2339-4396-aeac-1e582d31c225 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.533681] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance VIF info [] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.539544] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Creating folder: Project (06d1cb82c61344ebb38e2ef9a6c95a6c). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1231.539864] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ba478e7-5e22-4947-a302-aeae540acdfe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.550515] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Created folder: Project (06d1cb82c61344ebb38e2ef9a6c95a6c) in parent group-v942928. [ 1231.550804] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Creating folder: Instances. Parent ref: group-v942996. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1231.551149] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87edf19c-b8ed-4aad-98fb-da4a6851cc49 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.562418] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Created folder: Instances in parent group-v942996. 
[ 1231.562694] env[62730]: DEBUG oslo.service.loopingcall [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1231.562919] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1231.563176] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22279053-cdd9-4bf5-8a2b-f07de108b173 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.583451] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.583451] env[62730]: value = "task-4837169" [ 1231.583451] env[62730]: _type = "Task" [ 1231.583451] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.591970] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837169, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.094231] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837169, 'name': CreateVM_Task, 'duration_secs': 0.292486} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.094418] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1232.094884] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.095039] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.095349] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1232.095600] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b923ac0-3848-46c7-a7a0-d47c38edfb78 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.100486] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for the task: (returnval){ 
[ 1232.100486] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b838f2-ab31-23c0-d841-04698953b77a" [ 1232.100486] env[62730]: _type = "Task" [ 1232.100486] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.108890] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52b838f2-ab31-23c0-d841-04698953b77a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.611104] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.611436] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.611594] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.292863] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.293221] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.738309] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.732660] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.737270] env[62730]: DEBUG 
[ 1272.737473] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1272.737610] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1274.735202] env[62730]: WARNING oslo_vmware.rw_handles [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1274.735202] env[62730]: ERROR oslo_vmware.rw_handles
[ 1274.736292] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1274.742613] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
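The "Downloaded image file data … tmp-sparse.vmdk" followed by "Caching image" shows the driver's download-then-promote flow: the Glance image is first streamed to a temporary datastore path, and only afterwards turned into the shared cache entry, so a half-written file never carries the cache name. A rough local-filesystem sketch of that ordering (the real driver promotes via a CopyVirtualDisk task rather than a rename, as the next lines show; the download callable is a hypothetical stand-in):

```python
import shutil
from pathlib import Path


def fetch_image_if_missing(image_id, download, cache_dir="/tmp/image-cache"):
    """Download-then-promote: never expose a partially written cache file.

    download is a callable(image_id, dest_path) standing in for the HTTP
    transfer to the datastore; local paths stand in for datastore paths.
    """
    cached = Path(cache_dir) / image_id / f"{image_id}.vmdk"
    if cached.exists():
        return cached  # cache hit: nothing to fetch
    tmp = cached.parent / "tmp-sparse.vmdk"
    tmp.parent.mkdir(parents=True, exist_ok=True)
    download(image_id, tmp)   # stream image bytes to the temporary name
    shutil.move(tmp, cached)  # promote into the cache in one step
    return cached
```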
[ 1274.742962] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Copying Virtual Disk [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/82afd945-c674-41e9-92f0-ccb70b8e1388/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1274.743288] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bad8df4-e73e-4205-92c5-354cd9e2cf5e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1274.752221] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for the task: (returnval){
[ 1274.752221] env[62730]: value = "task-4837170"
[ 1274.752221] env[62730]: _type = "Task"
[ 1274.752221] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1274.761458] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Task: {'id': task-4837170, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1275.262544] env[62730]: DEBUG oslo_vmware.exceptions [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1275.262834] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1275.263489] env[62730]: ERROR nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1275.263489] env[62730]: Faults: ['InvalidArgument']
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] Traceback (most recent call last):
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     yield resources
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     self.driver.spawn(context, instance, image_meta,
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     self._fetch_image_if_missing(context, vi)
[ 1275.263489] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     image_cache(vi, tmp_image_ds_loc)
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     vm_util.copy_virtual_disk(
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     session._wait_for_task(vmdk_copy_task)
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     return self.wait_for_task(task_ref)
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     return evt.wait()
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     result = hub.switch()
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1275.263932] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     return self.greenlet.switch()
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     self.f(*self.args, **self.kw)
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]     raise exceptions.translate_fault(task_info.error)
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] Faults: ['InvalidArgument']
[ 1275.264408] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196]
[ 1275.264408] env[62730]: INFO nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Terminating instance
[ 1275.266422] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1275.266621] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1275.266948] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1275.267224] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1275.268676] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad504e1a-6fce-4edb-a6cc-4cb0f854bb5e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1275.270717] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c553b325-5901-4649-9746-1b1a2fe21ceb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1275.277358] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1275.277588] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae988749-c0f6-42f9-869d-248058e26b83 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
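"Fault InvalidArgument not matched" is oslo.vmware failing to find a dedicated exception class for the SOAP fault name, so the error surfaces as the generic VimFaultException seen in the traceback above. A condensed sketch of that lookup; the table here is illustrative, not the full registry in oslo_vmware/exceptions.py:

```python
class VimFaultException(Exception):
    """Generic fallback carrying the raw fault names from the server."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


class FileNotFoundException(VimFaultException):
    """Example of a specific, matched fault class."""


# Illustrative subset; the real registry maps many more fault names.
_FAULT_CLASSES = {"FileNotFound": FileNotFoundException}


def translate_fault(fault_name, message):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        # This branch corresponds to "Fault InvalidArgument not matched."
        return VimFaultException([fault_name], message)
    return cls([fault_name], message)


exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
assert isinstance(exc, VimFaultException)
assert exc.fault_list == ["InvalidArgument"]
```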
[ 1275.280206] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1275.280313] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1275.281265] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12eaaf61-1c07-4004-971b-c07e61b81ef7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1275.286610] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for the task: (returnval){
[ 1275.286610] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52a138d9-3353-3cbc-ee68-1fd6cdc6272f"
[ 1275.286610] env[62730]: _type = "Task"
[ 1275.286610] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1275.294829] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52a138d9-3353-3cbc-ee68-1fd6cdc6272f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1275.351833] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1275.352169] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1275.352400] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Deleting the datastore file [datastore2] 91052772-87d4-4fb3-b590-f071c0419196 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1275.352879] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3f00517-8f34-4abc-95a1-66781ac99539 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1275.360265] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for the task: (returnval){
[ 1275.360265] env[62730]: value = "task-4837172"
[ 1275.360265] env[62730]: _type = "Task"
[ 1275.360265] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1275.369985] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Task: {'id': task-4837172, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1275.737341] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1275.737712] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 1275.737712] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 1275.764063] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764243] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764379] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764505] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764630] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764757] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}}
[ 1275.764871] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building.
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1275.765049] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1275.765106] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1275.765227] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1275.765349] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1275.765867] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.775809] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.776063] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.776250] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.776409] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1275.777475] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d710f32a-cd8f-4b56-8ca2-3f38b75a4119 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.786820] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4265197f-5cc5-4fab-8761-f43eb3590cbc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.805898] env[62730]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1275.806172] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Creating directory with path [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.806537] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-391f10ce-e71d-4804-a0d7-a3df3c1b13bb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.808677] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9162d41e-7a08-40fe-b03d-00477d10f12a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.815851] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2200b16-2f2c-41c2-8a62-ff39c4246a30 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.821202] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Created directory with path [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.821453] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Fetch image to [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1275.821802] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1275.845496] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc3b9bd-49b8-45c7-93f1-c68fc371a32f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.848725] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180526MB free_disk=96GB free_vcpus=48 
pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1275.848869] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.849099] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.856038] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ea5a3d-d71e-4577-8fdf-9b91f10393c5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.868712] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66bb8a7-b275-45eb-a12a-057540e7d07e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.879056] env[62730]: DEBUG oslo_vmware.api [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Task: {'id': task-4837172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067097} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.903322] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.903561] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1275.903730] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1275.903911] env[62730]: INFO nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Took 0.64 seconds to destroy the instance on the hypervisor. 
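Each "Acquiring/acquired/released lock \"compute_resources\" … waited/held Ns" group in the surrounding lines comes from oslo.concurrency's instrumentation around a critical section: the resource tracker's periodic audit and the claim abort below serialize on the same named lock. A minimal usage sketch of that pattern, assuming oslo.concurrency is installed and with a placeholder body:

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Critical section: shared accounting state is mutated here, so the
    # periodic audit and claim aborts queue on the same named lock,
    # producing the "waited"/"held" timings seen in the log.
    pass


update_available_resource()
```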
[ 1275.906165] env[62730]: DEBUG nova.compute.claims [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1275.906351] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.908051] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975baae1-eb80-4630-a9c2-1ebe9fff0f83 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.919459] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a8fa251a-74c7-4166-a322-83bd1ca8afa9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.940978] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1275.948875] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 91052772-87d4-4fb3-b590-f071c0419196 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949255] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949255] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949350] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949454] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949573] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949691] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949808] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.949926] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.950057] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1275.962911] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1275.973726] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1275.984372] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1275.994660] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 175517cd-b112-4aa4-87e0-e74c1d9a07fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.004842] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.014795] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.025937] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.037028] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance be7f1a05-96f9-430c-b5ad-13fa1aae685b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.047783] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c0ada899-0ddb-456a-a1f3-097529654318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.058337] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f5f9fdc8-ca89-438e-a710-b3e1dd85f550 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.068675] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1276.068927] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1276.069129] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '60', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_984e31062b234b6ca4d2e7a42126eb64': '1', 'io_workload': '10', 'num_proj_0dc4f70a095944708ebe176443cc2134': '1', 'num_proj_7ae994dbceb044ef8c023cb31350f1ad': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'num_task_spawning': '1', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1276.124723] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1276.184246] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Completed reading data from the image iterator. 
{{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1276.184477] env[62730]: DEBUG oslo_vmware.rw_handles [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1276.376240] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823146b3-26be-4724-a74f-f5bad27b77c2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.384536] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0283539f-0dbb-4ef7-a440-a5442dde633a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.417938] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab89a6f-b088-414b-a15b-bf9e1b4685f6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.425716] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daca524f-fcdd-435f-8e63-162d965c9fc2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.439022] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.450808] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.464203] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1276.464365] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.615s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.464632] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.558s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.782161] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f14187f-1f0f-4355-b6f3-3c8ce134ae80 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.790135] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe55ae0-a992-4b90-ae7b-20c75882a4d6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.821578] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec26dddd-f90b-45bf-904d-73f01394b37b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.829356] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80352ed5-8864-4d82-b15c-069573b630b6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.842849] env[62730]: DEBUG nova.compute.provider_tree [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.851720] env[62730]: DEBUG nova.scheduler.client.report [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.870783] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.871406] env[62730]: ERROR nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.871406] env[62730]: Faults: ['InvalidArgument'] [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] Traceback (most recent call last): [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] 
File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] self.driver.spawn(context, instance, image_meta, [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] self._fetch_image_if_missing(context, vi) [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] image_cache(vi, tmp_image_ds_loc) [ 1276.871406] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] vm_util.copy_virtual_disk( [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] session._wait_for_task(vmdk_copy_task) [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] return self.wait_for_task(task_ref) [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] return evt.wait() [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] result = hub.switch() [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] return self.greenlet.switch() [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1276.871801] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] self.f(*self.args, **self.kw) [ 1276.872236] 
env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1276.872236] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] raise exceptions.translate_fault(task_info.error) [ 1276.872236] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.872236] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] Faults: ['InvalidArgument'] [ 1276.872236] env[62730]: ERROR nova.compute.manager [instance: 91052772-87d4-4fb3-b590-f071c0419196] [ 1276.872236] env[62730]: DEBUG nova.compute.utils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1276.873591] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Build of instance 91052772-87d4-4fb3-b590-f071c0419196 was re-scheduled: A specified parameter was not correct: fileType [ 1276.873591] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1276.873958] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1276.874155] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1276.874360] env[62730]: DEBUG nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1276.874543] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1277.220865] env[62730]: DEBUG nova.network.neutron [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.234531] env[62730]: INFO nova.compute.manager [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Took 0.36 seconds to deallocate network for instance. [ 1277.338858] env[62730]: INFO nova.scheduler.client.report [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Deleted allocations for instance 91052772-87d4-4fb3-b590-f071c0419196 [ 1277.365032] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c148f100-acf1-4759-9517-ccae24fbd472 tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 681.794s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.366299] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 482.299s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.366616] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "91052772-87d4-4fb3-b590-f071c0419196-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.366940] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.367053] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.369156] env[62730]: INFO nova.compute.manager [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Terminating instance [ 1277.370850] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquiring lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1277.370968] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Acquired lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.371168] env[62730]: DEBUG nova.network.neutron [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1277.381584] env[62730]: DEBUG nova.compute.manager [None req-a7135e55-50b8-46b6-89ad-bdd8da822381 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] [instance: 6a4c0163-c6e3-406d-bcb4-5baf627433e1] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.411966] env[62730]: DEBUG nova.network.neutron [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1277.415018] env[62730]: DEBUG nova.compute.manager [None req-a7135e55-50b8-46b6-89ad-bdd8da822381 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] [instance: 6a4c0163-c6e3-406d-bcb4-5baf627433e1] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.438263] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a7135e55-50b8-46b6-89ad-bdd8da822381 tempest-AttachVolumeTestJSON-164976101 tempest-AttachVolumeTestJSON-164976101-project-member] Lock "6a4c0163-c6e3-406d-bcb4-5baf627433e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.477s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.438651] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.448149] env[62730]: DEBUG nova.compute.manager [None req-da544af3-5f75-4475-8099-38512e89efc9 tempest-ServerActionsV293TestJSON-1480316407 tempest-ServerActionsV293TestJSON-1480316407-project-member] [instance: 87a01d32-4dcc-4e97-a39c-d48c146c18fb] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.475598] env[62730]: DEBUG nova.compute.manager [None req-da544af3-5f75-4475-8099-38512e89efc9 tempest-ServerActionsV293TestJSON-1480316407 tempest-ServerActionsV293TestJSON-1480316407-project-member] [instance: 87a01d32-4dcc-4e97-a39c-d48c146c18fb] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1277.498307] env[62730]: DEBUG oslo_concurrency.lockutils [None req-da544af3-5f75-4475-8099-38512e89efc9 tempest-ServerActionsV293TestJSON-1480316407 tempest-ServerActionsV293TestJSON-1480316407-project-member] Lock "87a01d32-4dcc-4e97-a39c-d48c146c18fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.395s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.514704] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1277.575208] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.575501] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.577265] env[62730]: INFO nova.compute.claims [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1277.580405] env[62730]: DEBUG nova.network.neutron [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.588988] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Releasing lock "refresh_cache-91052772-87d4-4fb3-b590-f071c0419196" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.589438] env[62730]: DEBUG nova.compute.manager [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1277.589633] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1277.590173] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66905307-1b33-47c0-a7cc-cf8b1532b248 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.601389] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd2935f-3959-4ea2-a1d6-7390de112394 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.634842] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91052772-87d4-4fb3-b590-f071c0419196 could not be found. [ 1277.635122] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1277.635358] env[62730]: INFO nova.compute.manager [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1277.635658] env[62730]: DEBUG oslo.service.loopingcall [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.639068] env[62730]: DEBUG nova.compute.manager [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1277.639232] env[62730]: DEBUG nova.network.neutron [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1277.664450] env[62730]: DEBUG nova.network.neutron [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1277.674999] env[62730]: DEBUG nova.network.neutron [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.686926] env[62730]: INFO nova.compute.manager [-] [instance: 91052772-87d4-4fb3-b590-f071c0419196] Took 0.05 seconds to deallocate network for instance. 
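The terminate sequence above shows two patterns worth noting: the destroy path treats `InstanceNotFound` from the backend as "already destroyed" rather than as a failure, and the subsequent network cleanup is driven through a looping-call wrapper (`_deallocate_network_with_retries`) so transient Neutron errors are retried. Below is a minimal, self-contained sketch of that tolerate-missing-VM-plus-bounded-retry pattern in plain Python; the names `destroy_instance`, `deallocate_with_retries`, `NEUTRON_RETRIES`, and `RETRY_INTERVAL` are illustrative assumptions, not nova's actual helpers.

```python
import time

NEUTRON_RETRIES = 3      # hypothetical retry budget
RETRY_INTERVAL = 2.0     # hypothetical seconds between attempts


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(vm_ref_lookup, instance_uuid):
    """Destroy the backend VM; a missing VM counts as success."""
    try:
        vm_ref = vm_ref_lookup(instance_uuid)
    except InstanceNotFound:
        # Matches the WARNING above: the VM is already gone on the
        # backend, so the destroy is treated as complete, not an error.
        print("Instance does not exist on backend: %s" % instance_uuid)
        return
    # ... UnregisterVM / delete-datastore-files calls would go here ...


def deallocate_with_retries(deallocate, instance_uuid,
                            retries=NEUTRON_RETRIES):
    """Retry network deallocation, mimicking the looping-call wrapper."""
    for attempt in range(1, retries + 1):
        try:
            deallocate(instance_uuid)
            return
        except Exception as exc:  # broad on purpose for the sketch
            if attempt == retries:
                raise
            print("deallocate failed (%s), retry %d/%d"
                  % (exc, attempt, retries))
            time.sleep(RETRY_INTERVAL)
```

In the real service the retry loop runs inside an oslo.service looping call (hence the "Waiting for function ... to return" DEBUG line) rather than a plain `for` loop, but the control flow is the same: succeed, or exhaust the budget and re-raise.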
[ 1277.797362] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c827545c-12af-47a7-8ece-226b7911a89a tempest-SecurityGroupsTestJSON-844234654 tempest-SecurityGroupsTestJSON-844234654-project-member] Lock "91052772-87d4-4fb3-b590-f071c0419196" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.431s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.798949] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "91052772-87d4-4fb3-b590-f071c0419196" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 183.747s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.798949] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 91052772-87d4-4fb3-b590-f071c0419196] During sync_power_state the instance has a pending task (deleting). Skip. [ 1277.798949] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "91052772-87d4-4fb3-b590-f071c0419196" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.928583] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9666f4be-0a68-4bbb-9dfd-7d2936a4d09d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.936649] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631572b3-7ec2-454d-8500-4eb5f45a91bd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.968446] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cd5e8e-2a8b-4230-8310-ab080e23cb9a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.976040] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa832d9-1cb6-4022-9423-d93f56b173c8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.989538] env[62730]: DEBUG nova.compute.provider_tree [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.000315] env[62730]: DEBUG nova.scheduler.client.report [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.016470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.441s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.016971] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1278.051943] env[62730]: DEBUG nova.compute.utils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.053449] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1278.054062] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1278.063063] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1278.117097] env[62730]: DEBUG nova.policy [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '243a60908eb2407ebe1131d6a3a572bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93039e316cca49179277828e04a9ce61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1278.131659] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1278.157244] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1278.157509] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1278.157671] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.157921] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1278.158182] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Image pref 0:0:0 
{{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.158349] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1278.158556] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1278.158719] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1278.158887] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1278.159061] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1278.159275] env[62730]: DEBUG nova.virt.hardware [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1278.160153] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b79a04-0139-4736-8c99-b975f8164196 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.169047] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c527bcbb-516d-49a8-b266-f55bfd4c4d7d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.872437] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Successfully created port: ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.596456] env[62730]: DEBUG nova.compute.manager [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Received event 
network-vif-plugged-ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1279.596722] env[62730]: DEBUG oslo_concurrency.lockutils [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] Acquiring lock "e8657fe0-3db2-4768-817f-944a736da401-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.596949] env[62730]: DEBUG oslo_concurrency.lockutils [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] Lock "e8657fe0-3db2-4768-817f-944a736da401-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.597325] env[62730]: DEBUG oslo_concurrency.lockutils [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] Lock "e8657fe0-3db2-4768-817f-944a736da401-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.597605] env[62730]: DEBUG nova.compute.manager [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] No waiting events found dispatching network-vif-plugged-ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1279.597714] env[62730]: WARNING nova.compute.manager [req-532c9f5f-9ce2-495e-bc99-3d6e7e2617fa req-a611b5a9-f7ea-4fb9-922d-7f70841e006d service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Received unexpected event network-vif-plugged-ea0f3248-bf5f-456b-a6d9-19be3cafc06c for instance with vm_state building and task_state spawning. 
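The `network-vif-plugged` handling above illustrates nova's external-event rendezvous: the spawn path registers an event it intends to wait on, and the Neutron-triggered handler pops it under the per-instance "-events" lock, warning when no waiter is registered yet (as happens here, since the instance is still in `building`/`spawning`). A rough, self-contained sketch of that pop-under-lock pattern follows; the class and function names are hypothetical stand-ins for the real `InstanceEvents` machinery.

```python
import threading


class InstanceEvents:
    """Toy version of the per-instance external-event registry."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "-events" lock
        self._events = {}               # instance uuid -> {event name: Event}

    def prepare_for_event(self, uuid, name):
        """Called by the spawn path before it starts waiting."""
        with self._lock:
            evt = threading.Event()
            self._events.setdefault(uuid, {})[name] = evt
            return evt

    def pop_instance_event(self, uuid, name):
        """Called by the external-event handler; None if no waiter."""
        with self._lock:
            return self._events.get(uuid, {}).pop(name, None)


def handle_external_event(registry, uuid, name):
    evt = registry.pop_instance_event(uuid, name)
    if evt is None:
        # Matches the WARNING above: nothing is waiting for this event
        # yet, so it is logged as unexpected and dropped.
        print("Received unexpected event %s for instance %s" % (name, uuid))
    else:
        evt.set()  # wake the spawn thread blocked on evt.wait(timeout)
```

A spawn thread would call `prepare_for_event(...).wait(timeout)` around VIF plugging; an event that arrives before any waiter exists, as in this log, is benign while the instance is still building.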
[ 1279.835784] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Successfully updated port: ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.849830] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.850687] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquired lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.850687] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1279.928515] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1280.254362] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Updating instance_info_cache with network_info: [{"id": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "address": "fa:16:3e:fd:77:51", "network": {"id": "c32380e7-0dc3-4951-8c94-98017a9046fc", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-291728730-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93039e316cca49179277828e04a9ce61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0f3248-bf", "ovs_interfaceid": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.265411] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Releasing lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.265723] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance network_info: |[{"id": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "address": "fa:16:3e:fd:77:51", "network": {"id": "c32380e7-0dc3-4951-8c94-98017a9046fc", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-291728730-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93039e316cca49179277828e04a9ce61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0f3248-bf", "ovs_interfaceid": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.266162] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:77:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea0f3248-bf5f-456b-a6d9-19be3cafc06c', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.273877] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Creating folder: Project (93039e316cca49179277828e04a9ce61). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.274537] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f749e41-594a-4a4b-9b18-e995ac73dff3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.287509] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Created folder: Project (93039e316cca49179277828e04a9ce61) in parent group-v942928. [ 1280.287714] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Creating folder: Instances. Parent ref: group-v942999. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.287960] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de720d65-ce57-4b9b-b879-3ca35d54979a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.298037] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Created folder: Instances in parent group-v942999. [ 1280.298287] env[62730]: DEBUG oslo.service.loopingcall [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.298550] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1280.298835] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-044f37e5-99f0-4af2-87f8-f10b342424fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.320841] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.320841] env[62730]: value = "task-4837175" [ 1280.320841] env[62730]: _type = "Task" [ 1280.320841] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.331230] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837175, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.831314] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837175, 'name': CreateVM_Task, 'duration_secs': 0.29903} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.831494] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1280.832165] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.832363] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.832698] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1280.832953] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d047a08-3c51-4299-b783-16caba765b40 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.837525] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for the task: (returnval){ [ 1280.837525] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e0726b-06ed-d6af-5aab-2c64b0839e7e" [ 1280.837525] env[62730]: _type = 
"Task" [ 1280.837525] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.845683] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e0726b-06ed-d6af-5aab-2c64b0839e7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.349095] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.349493] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.349493] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.632115] env[62730]: DEBUG nova.compute.manager [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Received event network-changed-ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1281.632395] env[62730]: DEBUG nova.compute.manager [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Refreshing instance network info cache due to event network-changed-ea0f3248-bf5f-456b-a6d9-19be3cafc06c. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1281.632558] env[62730]: DEBUG oslo_concurrency.lockutils [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] Acquiring lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.632706] env[62730]: DEBUG oslo_concurrency.lockutils [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] Acquired lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.632865] env[62730]: DEBUG nova.network.neutron [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Refreshing network info cache for port ea0f3248-bf5f-456b-a6d9-19be3cafc06c {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1281.737543] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.737738] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1281.909574] env[62730]: DEBUG nova.network.neutron [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Updated VIF entry in instance network info cache for port ea0f3248-bf5f-456b-a6d9-19be3cafc06c. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1281.910010] env[62730]: DEBUG nova.network.neutron [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] [instance: e8657fe0-3db2-4768-817f-944a736da401] Updating instance_info_cache with network_info: [{"id": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "address": "fa:16:3e:fd:77:51", "network": {"id": "c32380e7-0dc3-4951-8c94-98017a9046fc", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-291728730-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93039e316cca49179277828e04a9ce61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0f3248-bf", "ovs_interfaceid": "ea0f3248-bf5f-456b-a6d9-19be3cafc06c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.919583] env[62730]: DEBUG oslo_concurrency.lockutils [req-413aaff5-748f-4d32-8175-b3a3eb050faa req-759eb834-4469-4e18-ab25-2e8a66906401 service nova] Releasing lock "refresh_cache-e8657fe0-3db2-4768-817f-944a736da401" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.141788] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "e8657fe0-3db2-4768-817f-944a736da401" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.727063] env[62730]: WARNING oslo_vmware.rw_handles [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1324.727063] env[62730]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1324.727063] env[62730]: ERROR oslo_vmware.rw_handles [ 1324.727063] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1324.729426] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1324.729828] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Copying Virtual Disk [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/106f9ee6-91e6-42ba-a925-1d3293e271f1/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1324.730201] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bbcfc4c-a62f-4381-94b3-81432bc813c9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.739793] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for the task: (returnval){ [ 1324.739793] env[62730]: value = "task-4837176" [ 1324.739793] env[62730]: _type = "Task" [ 1324.739793] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.749425] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Task: {'id': task-4837176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.250463] env[62730]: DEBUG oslo_vmware.exceptions [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1325.250747] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.251321] env[62730]: ERROR nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.251321] env[62730]: Faults: ['InvalidArgument'] [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Traceback (most recent call last): [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] yield resources [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self.driver.spawn(context, instance, image_meta, [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self._fetch_image_if_missing(context, vi) [ 1325.251321] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] image_cache(vi, tmp_image_ds_loc) [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] vm_util.copy_virtual_disk( [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] session._wait_for_task(vmdk_copy_task) [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return self.wait_for_task(task_ref) [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return evt.wait() [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] result = hub.switch() [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1325.251630] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return self.greenlet.switch() [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self.f(*self.args, **self.kw) [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] raise exceptions.translate_fault(task_info.error) [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Faults: ['InvalidArgument'] [ 1325.251972] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] [ 1325.251972] env[62730]: INFO nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Terminating instance [ 1325.253339] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.253572] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.253895] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-688b4116-ec7c-4a7f-ae4c-aefca68b672d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.257384] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1325.257611] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1325.258616] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41bc98a-6f00-4df9-808b-760df8054641 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.265816] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1325.266091] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dafc767-45d0-4fd2-90a3-3e10aebfcf1a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.268822] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.269009] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1325.269989] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdbfe70f-bc28-4137-bbfe-33da53d790d1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.276023] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 1325.276023] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]525024aa-4b39-24aa-e5e4-237ea7b69d34" [ 1325.276023] env[62730]: _type = "Task" [ 1325.276023] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.290399] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1325.290662] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Creating directory with path [datastore2] vmware_temp/e3b2c895-0abc-4b6a-87b8-38a7fa031bf4/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.290906] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48702068-cb5a-406d-a389-276ac9547b97 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.312365] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Created directory with path [datastore2] vmware_temp/e3b2c895-0abc-4b6a-87b8-38a7fa031bf4/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.312578] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Fetch image to [datastore2] vmware_temp/e3b2c895-0abc-4b6a-87b8-38a7fa031bf4/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1325.312756] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/e3b2c895-0abc-4b6a-87b8-38a7fa031bf4/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1325.313605] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1b1e5e-3879-47b9-9e76-8f7b29656ff5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.321475] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ab6347-c500-4342-a0c0-00e3634973ca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.331506] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0019011f-d5fc-40d9-acfa-798377e457ae {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.368486] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c9dcf5d4-a17e-4bce-ae91-0ff95985c760 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.371278] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1325.371488] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1325.371667] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Deleting the datastore file [datastore2] 04ba035f-97b6-49d1-8506-35f7d6fccb03 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.371912] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e845d5e-1ed6-469b-9cb9-de0088d25fd7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.377205] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3fcf6ba8-f93a-4682-8e70-584f80cca48a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.380220] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for the task: (returnval){ [ 1325.380220] env[62730]: value = "task-4837178" [ 1325.380220] env[62730]: _type = "Task" [ 1325.380220] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.388797] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Task: {'id': task-4837178, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.409995] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1325.545088] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.545088] env[62730]: ERROR nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1325.545088] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', 
**kwargs) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1325.545312] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] yield resources [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.driver.spawn(context, instance, image_meta, [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._fetch_image_if_missing(context, vi) [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1325.545544] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image_fetch(context, vi, tmp_image_ds_loc) [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] images.fetch_image( [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] metadata = IMAGE_API.get(context, image_ref) [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return session.show(context, image_id, [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] _reraise_translated_image_exception(image_id) [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise new_exc.with_traceback(exc_trace) [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1325.545903] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', **kwargs) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1325.546251] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1325.546492] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1325.546492] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.ImageNotAuthorized: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. [ 1325.546492] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1325.550627] env[62730]: INFO nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Terminating instance [ 1325.550627] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.550627] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.550627] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0086b45f-2cbf-49da-8cef-f3a0d6846265 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.555694] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1325.555694] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1325.556389] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12fd043-bdc2-4949-8341-6a271350a217 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.564253] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1325.564544] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb65625f-fd0b-446f-b61f-da7383b3260e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.567098] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.567286] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1325.568620] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-080b599c-3966-4ee2-a2c0-3d30fe2e8a64 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.576705] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1325.576705] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d28cb1-20fc-1aa6-5710-6502226d93f3" [ 1325.576705] env[62730]: _type = "Task" [ 1325.576705] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.584707] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d28cb1-20fc-1aa6-5710-6502226d93f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.627079] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1325.627333] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1325.627528] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleting the datastore file [datastore2] b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.627815] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17ce4699-6ac4-4ff8-a4d2-e0ae9cd7d3d8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.634629] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for the task: (returnval){ [ 1325.634629] env[62730]: value = "task-4837180" [ 1325.634629] env[62730]: _type = "Task" [ 1325.634629] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.642518] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': task-4837180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.890826] env[62730]: DEBUG oslo_vmware.api [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Task: {'id': task-4837178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069876} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.891192] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.891279] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1325.891669] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1325.891669] env[62730]: INFO nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1325.894050] env[62730]: DEBUG nova.compute.claims [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1325.894244] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.894466] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.090445] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1326.090718] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.091123] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-83a1bbd1-773d-4d3d-8cf0-a8ad8bd903ea {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.105644] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.105859] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Fetch image to [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1326.106046] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1326.109071] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63148206-d4d9-436d-bdbd-26b930cd254a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.116667] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6acb9b-7ffb-4723-a460-7e9787eea4e9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.128719] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cb1183-c72e-4b12-b6f6-bd6c057d11a2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.165741] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd45133a-b8c5-444b-a0b5-496a24eb2c37 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.173503] env[62730]: DEBUG oslo_vmware.api [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Task: {'id': task-4837180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063045} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.175163] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1326.175360] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1326.175539] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1326.175715] env[62730]: INFO nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1326.177811] env[62730]: DEBUG nova.compute.claims [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1326.178207] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.178445] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc1265ce-cc46-4106-988f-bd20e1ea9e94 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.204449] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1326.259219] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fea6129-0adf-47ff-a23b-0e1a7c1896c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.262769] env[62730]: DEBUG oslo_vmware.rw_handles [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1326.319763] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b53ec9-26f0-4e1c-a9e8-1fdf17391932 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.325560] env[62730]: DEBUG oslo_vmware.rw_handles [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1326.325760] env[62730]: DEBUG oslo_vmware.rw_handles [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1326.352823] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31767bfa-4917-4be3-9e7b-cc07acc69141 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.361157] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec22639-26de-4fb6-9596-5ac77cd7f0b6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.375019] env[62730]: DEBUG nova.compute.provider_tree [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.383866] env[62730]: DEBUG nova.scheduler.client.report [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.398029] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.503s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.398599] env[62730]: ERROR nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.398599] env[62730]: Faults: ['InvalidArgument'] [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Traceback (most recent call last): [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self.driver.spawn(context, instance, image_meta, [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self._fetch_image_if_missing(context, vi) [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] image_cache(vi, tmp_image_ds_loc) [ 1326.398599] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] vm_util.copy_virtual_disk( [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] session._wait_for_task(vmdk_copy_task) [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return self.wait_for_task(task_ref) [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return evt.wait() [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1326.398911] env[62730]: ERROR nova.compute.manager 
[instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] result = hub.switch() [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] return self.greenlet.switch() [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1326.398911] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] self.f(*self.args, **self.kw) [ 1326.399197] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1326.399197] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] raise exceptions.translate_fault(task_info.error) [ 1326.399197] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.399197] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Faults: ['InvalidArgument'] [ 1326.399197] env[62730]: ERROR nova.compute.manager [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] [ 1326.399341] env[62730]: DEBUG nova.compute.utils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1326.400515] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.222s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.404168] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Build of instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 was re-scheduled: A specified parameter was not correct: fileType [ 1326.404168] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1326.404168] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1326.404168] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1326.404168] env[62730]: DEBUG nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1326.404312] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1326.704012] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad724f6-4f64-4d44-9b1a-9113df365f1c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.715326] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74656c0-6c6c-4809-b635-558ec429fd33 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.747408] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce448d4-c790-484b-81e6-2b1f86b09721 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.755617] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb039c59-e3fd-4a81-8b38-f7a62ab2e009 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.769472] env[62730]: DEBUG nova.compute.provider_tree [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.779524] env[62730]: DEBUG nova.scheduler.client.report [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.793128] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.393s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.793879] env[62730]: ERROR 
nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.793879] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', **kwargs) [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. 
Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.794384] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.driver.spawn(context, instance, image_meta, [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._fetch_image_if_missing(context, vi) [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image_fetch(context, vi, tmp_image_ds_loc) [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] images.fetch_image( [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] metadata = IMAGE_API.get(context, image_ref) [ 1326.794851] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return session.show(context, image_id, [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] _reraise_translated_image_exception(image_id) [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise new_exc.with_traceback(exc_trace) [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.795389] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', **kwargs) [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.ImageNotAuthorized: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. [ 1326.795890] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.796149] env[62730]: DEBUG nova.compute.utils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. 
{{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1326.796149] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Build of instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 was re-scheduled: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1326.796498] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1326.796736] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1326.796922] env[62730]: DEBUG nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1326.797124] env[62730]: DEBUG nova.network.neutron [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1326.990860] env[62730]: DEBUG neutronclient.v2_0.client [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62730) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1326.991502] env[62730]: ERROR nova.compute.manager [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
[ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.991502] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', **kwargs) [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.992226] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.driver.spawn(context, instance, image_meta, [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._fetch_image_if_missing(context, vi) [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image_fetch(context, vi, tmp_image_ds_loc) [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] images.fetch_image( [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] metadata = IMAGE_API.get(context, image_ref) [ 1326.992863] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return session.show(context, image_id, [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] _reraise_translated_image_exception(image_id) [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise new_exc.with_traceback(exc_trace) [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = getattr(controller, method)(*args, **kwargs) [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._get(image_id) [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1326.993280] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] resp, body = self.http_client.get(url, headers=header) [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.request(url, 'GET', **kwargs) [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self._handle_response(resp) [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exc.from_response(resp, resp.content) [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.ImageNotAuthorized: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. 
[ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.993634] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._build_and_run_instance(context, instance, image, [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exception.RescheduledException( [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.RescheduledException: Build of instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 was re-scheduled: Not authorized for image a46adab9-3ef5-4b2e-8d44-bab77576ed71. [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1326.993965] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] exception_handler_v20(status_code, error_body) [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise client_exc(message=error_message, [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Neutron server returns request_ids: ['req-9692b99d-9103-4392-8387-f822cf69c454'] [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._deallocate_network(context, instance, requested_networks) [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.network_api.deallocate_for_instance( [ 1326.994338] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] data = neutron.list_ports(**search_opts) [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.list('ports', self.ports_path, retrieve_all, [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] for r in self._pagination(collection, path, **params): [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] res = self.get(path, params=params) [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.994699] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.retry_request("GET", action, body=body, [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.do_request(method, action, body=body, [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._handle_fault_response(status_code, replybody, resp) [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exception.Unauthorized() [ 1326.995048] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.Unauthorized: Not authorized. [ 1326.995381] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1327.028835] env[62730]: DEBUG nova.network.neutron [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.081101] env[62730]: INFO nova.compute.manager [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Took 0.68 seconds to deallocate network for instance. 
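The tracebacks above capture Nova's client-exception translation pattern twice over. On the Glance side, _reraise_translated_image_exception() swaps glanceclient.exc.HTTPUnauthorized for nova.exception.ImageNotAuthorized while preserving the original traceback (the `raise new_exc.with_traceback(exc_trace)` frame). On the Neutron side, the `wrapper` frames at nova/network/neutron.py lines 196 and 204 show every neutronclient call being wrapped so that a 401 surfaces as nova.exception.Unauthorized, and (as happens further down this log, via line 212) as NeutronAdminCredentialConfigurationInvalid when it is Nova's own admin token that is rejected. A minimal sketch of that wrapper idea follows; the class and function names are simplified stand-ins, not Nova's actual definitions:

    # Hedged sketch of the 401-translation wrapper visible in the
    # nova/network/neutron.py frames above; names are stand-ins.
    class NeutronUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class Unauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the corresponding nova.exception class."""

    def translate_neutron_exceptions(func, uses_admin_token=False):
        """Wrap a neutronclient call; re-raise 401s as Nova exceptions."""
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)  # the 'ret = obj(...)' frame
            except NeutronUnauthorized:
                if uses_admin_token:
                    # Nova's own service credentials were rejected:
                    # an operator-side configuration problem.
                    raise NeutronAdminCredentialConfigurationInvalid()
                # The end user's token was rejected, as in the
                # traceback above.
                raise Unauthorized()
        return wrapper

In this run the first 401 comes back on the Tempest user's token; given that the build lock was held for 637.263s before release, the token plausibly expired while the request was queued, which is why deallocation fails with Unauthorized here rather than the admin-credential error seen later.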
[ 1327.112844] env[62730]: INFO nova.scheduler.client.report [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Deleted allocations for instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 [ 1327.143426] env[62730]: DEBUG oslo_concurrency.lockutils [None req-47e551d7-95c2-463c-a4f8-acfa72b8b7e8 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.263s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.145261] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 439.030s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.145584] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.145851] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.146480] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.148068] env[62730]: INFO nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Terminating instance [ 1327.149793] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquiring lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.149926] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Acquired lock
"refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.150164] env[62730]: DEBUG nova.network.neutron [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1327.153688] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1327.216702] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.217083] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.219510] env[62730]: INFO nova.compute.claims [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1327.245693] env[62730]: INFO nova.scheduler.client.report [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Deleted allocations for instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 [ 1327.283681] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c61f4649-bc59-4599-b1fc-3e91b9975a85 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 688.910s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.285106] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 483.502s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.285972] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Acquiring lock 
"04ba035f-97b6-49d1-8506-35f7d6fccb03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.285972] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.285972] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.288387] env[62730]: INFO nova.compute.manager [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Terminating instance [ 1327.292302] env[62730]: DEBUG nova.compute.manager [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1327.292507] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1327.292990] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e05c7bf-e5e1-4e6e-8a14-894bdfcabc44 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.304266] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1327.309984] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246662c9-2e39-4496-920c-35bb4f7ea46b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.346606] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04ba035f-97b6-49d1-8506-35f7d6fccb03 could not be found. 
[ 1327.346844] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.347087] env[62730]: INFO nova.compute.manager [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1327.347364] env[62730]: DEBUG oslo.service.loopingcall [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.352845] env[62730]: DEBUG nova.compute.manager [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1327.352966] env[62730]: DEBUG nova.network.neutron [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1327.369769] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.398443] env[62730]: DEBUG nova.network.neutron [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.408726] env[62730]: INFO nova.compute.manager [-] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] Took 0.06 seconds to deallocate network for instance. [ 1327.522178] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ae8ebce-34ef-4a18-a8da-345aaf4f0101 tempest-ListServerFiltersTestJSON-1817198581 tempest-ListServerFiltersTestJSON-1817198581-project-member] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.237s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.526023] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 233.471s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.526023] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 04ba035f-97b6-49d1-8506-35f7d6fccb03] During sync_power_state the instance has a pending task (deleting). Skip.
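The lockutils records on either side of this point all follow the same instrumented-lock pattern: a named semaphore serializes work on a resource (an instance UUID, "compute_resources", a "refresh_cache-..." entry), and the wrapper logs how long each caller waited for the lock and how long it held it; the 233.471s wait above is the periodic _sync_power_states task queued behind the per-instance lock during the delete. A compact sketch of that pattern, assuming plain thread locks rather than oslo.concurrency's fair and external lock variants (`synchronized`, `_LOCKS`, and `instance_claim` below are simplified stand-ins, not oslo's implementation):

    # Hedged sketch of the acquire/wait/held logging seen in this log.
    import threading
    import time

    _LOCKS = {}  # name -> threading.Lock, like an internal semaphore table

    def synchronized(name):
        lock = _LOCKS.setdefault(name, threading.Lock())
        def decorator(func):
            def wrapper(*args, **kwargs):
                target = func.__qualname__
                print('Acquiring lock "%s" by "%s"' % (name, target))
                start = time.monotonic()
                with lock:
                    print('Lock "%s" acquired by "%s" :: waited %.3fs'
                          % (name, target, time.monotonic() - start))
                    held_from = time.monotonic()
                    try:
                        return func(*args, **kwargs)
                    finally:
                        print('Lock "%s" "released" by "%s" :: held %.3fs'
                              % (name, target, time.monotonic() - held_from))
            return wrapper
        return decorator

    @synchronized("compute_resources")
    def instance_claim():
        time.sleep(0.01)  # stand-in for the resource tracker's claim work

    instance_claim()  # prints the same acquire/wait/held trio as above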
[ 1327.526023] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "04ba035f-97b6-49d1-8506-35f7d6fccb03" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.598737] env[62730]: DEBUG nova.network.neutron [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Updating instance_info_cache with network_info: [{"id": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "address": "fa:16:3e:fe:89:1e", "network": {"id": "3f89fe56-0bdd-4a7e-b7f4-b089688f0c6a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "05ec08bc94b84623a044562d4cbaee75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4349e30-c086-4c24-9e0e-83996d808a1b", "external-id": "nsx-vlan-transportzone-266", "segmentation_id": 266, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ec47e1b-ed", "ovs_interfaceid": "6ec47e1b-edbd-459e-a11d-cca8ecb06110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.607891] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Releasing lock "refresh_cache-b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.608343] env[62730]: DEBUG nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Start destroying the instance on the hypervisor.
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1327.608541] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1327.609120] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3aa9658-b35e-4920-bb0c-5c41dc59b2fc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.613613] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7f49be-e805-4c21-99f5-41093d0e8fe9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.620502] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7c20e7-9623-49a2-8438-e2eab5fc47c4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.636693] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e547f1-05f0-454c-aaff-25d4d60f1c60 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.685050] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac5c34e-c2e3-4b25-9085-7d66eddeac10 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.687989] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856 could not be found. [ 1327.688241] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.688443] env[62730]: INFO nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1327.688718] env[62730]: DEBUG oslo.service.loopingcall [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.689082] env[62730]: DEBUG nova.compute.manager [-] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1327.689177] env[62730]: DEBUG nova.network.neutron [-] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1327.697088] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d8660-d57c-45eb-b526-42eb574b8d32 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.711937] env[62730]: DEBUG nova.compute.provider_tree [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.722587] env[62730]: DEBUG nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1327.740548] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.523s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.741079] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1327.743562] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.374s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.745466] env[62730]: INFO nova.compute.claims [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1327.784436] env[62730]: DEBUG nova.compute.utils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.785769] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1327.785970] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1327.795880] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1327.822561] env[62730]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62730) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1327.822834] env[62730]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token; please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1327.824039] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-d2f4e819-8eba-4db9-a330-c49abfa7517c'] [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1327.824039] env[62730]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824458] env[62730]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1327.824458] env[62730]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1327.824848] env[62730]: ERROR oslo.service.loopingcall [ 1327.825200] env[62730]: ERROR nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1327.865771] env[62730]: ERROR nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] exception_handler_v20(status_code, error_body) [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise client_exc(message=error_message, [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Neutron server returns request_ids: ['req-d2f4e819-8eba-4db9-a330-c49abfa7517c'] [ 1327.865771] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During handling of the above exception, another exception occurred: [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Traceback (most recent call last): [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._delete_instance(context, instance, bdms) [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._shutdown_instance(context, instance, bdms) [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._try_deallocate_network(context, instance, requested_networks) [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] with excutils.save_and_reraise_exception(): [ 1327.866208] env[62730]: ERROR 
nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.866208] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.force_reraise() [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise self.value [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] _deallocate_network_with_retries() [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return evt.wait() [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = hub.switch() [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.greenlet.switch() [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1327.866693] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = func(*self.args, **self.kw) [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] result = f(*args, **kwargs) [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._deallocate_network( [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self.network_api.deallocate_for_instance( [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: 
b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] data = neutron.list_ports(**search_opts) [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.list('ports', self.ports_path, retrieve_all, [ 1327.867069] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] for r in self._pagination(collection, path, **params): [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] res = self.get(path, params=params) [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.retry_request("GET", action, body=body, [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1327.867534] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] return self.do_request(method, action, body=body, [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] ret = obj(*args, **kwargs) [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] self._handle_fault_response(status_code, replybody, resp) [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1327.867905] env[62730]: ERROR nova.compute.manager [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] [ 1327.873053] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1327.900342] env[62730]: DEBUG oslo_concurrency.lockutils [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.755s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.903048] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1327.903288] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1327.903447] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.903632] env[62730]: DEBUG nova.virt.hardware [None 
req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1327.903780] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.903932] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1327.904157] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1327.904330] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1327.904497] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1327.904665] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1327.904836] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1327.905155] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 233.853s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.905334] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] During sync_power_state the instance has a pending task (deleting). Skip. 
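The nova.virt.hardware DEBUG lines above trace the vCPU topology search for the m1.nano flavor: with no flavor or image constraints (limits and preferences all 0:0:0, maxima 65536 per dimension), the search reduces to factoring the vCPU count into (sockets, cores, threads) tuples, and 1 vCPU admits exactly one. A minimal sketch of that enumeration, with a namedtuple standing in for Nova's VirtCPUTopology object:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, maxima=VirtCPUTopology(65536, 65536, 65536)):
        # Enumerate every (sockets, cores, threads) factorisation of the
        # vCPU count that fits the per-dimension maxima logged above.
        for sockets in range(1, min(vcpus, maxima.sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, maxima.cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= maxima.threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # For the m1.nano flavor above (vcpus=1) this yields a single topology,
    # VirtCPUTopology(sockets=1, cores=1, threads=1), matching the
    # "Got 1 possible topologies" DEBUG line.
    print(list(possible_topologies(1)))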
[ 1327.905533] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.906828] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef74aa0-6d53-44e4-b192-6b1147ac48e1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.910661] env[62730]: DEBUG nova.policy [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cd5284131a047c5826c253495b16a0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d775e3135484ed8b81c9d2991f2bedb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1327.922636] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b87b8f-a1f5-4fba-b1cc-57a639296bb2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.989834] env[62730]: INFO nova.compute.manager [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] [instance: b6c2b9a0-44a3-43ae-a2db-5d9dc3acf856] Successfully reverted task state from None on failure for instance. [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server [None req-32dd6f1a-dbad-4237-bff8-bdda286892f2 tempest-DeleteServersAdminTestJSON-956385282 tempest-DeleteServersAdminTestJSON-956385282-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-d2f4e819-8eba-4db9-a330-c49abfa7517c'] [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1327.994707] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1327.995427] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1327.995878] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1327.996344] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1327.997049] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.997546] env[62730]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1327.997546] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1327.997993] env[62730]: ERROR oslo_messaging.rpc.server [ 1328.100905] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49669cd-2942-45e5-a0d4-3f0d691bebee {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.110367] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdbb9b9-4c96-4fc0-9025-46992b9ca0ca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.139283] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7a0d39-b03c-4492-adcf-23a9ba8bc716 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.147215] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1359231e-539c-4d6a-9704-e1f744b1363d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.161439] env[62730]: DEBUG nova.compute.provider_tree [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1328.170854] env[62730]: DEBUG nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1328.186935] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.187458] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1328.223022] env[62730]: DEBUG nova.compute.utils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1328.225459] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1328.225639] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1328.233668] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1328.306348] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1328.329428] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1328.329683] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1328.329889] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1328.330045] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1328.330200] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1328.330365] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1328.330575] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1328.330736] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1328.330904] env[62730]: DEBUG nova.virt.hardware [None 
req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1328.331092] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1328.331272] env[62730]: DEBUG nova.virt.hardware [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1328.332187] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e799318-6c61-4b7b-977a-db9d97ca68b0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.340534] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ff1fdd-fc71-4377-b5ed-1168bfd2c469 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.376280] env[62730]: DEBUG nova.policy [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cd5284131a047c5826c253495b16a0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d775e3135484ed8b81c9d2991f2bedb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1328.481801] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Successfully created port: febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1329.453737] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Successfully created port: 05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1330.134983] env[62730]: DEBUG nova.compute.manager [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Received event network-vif-plugged-febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1330.135402] env[62730]: DEBUG oslo_concurrency.lockutils [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] 
Acquiring lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.135743] env[62730]: DEBUG oslo_concurrency.lockutils [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.136060] env[62730]: DEBUG oslo_concurrency.lockutils [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.137546] env[62730]: DEBUG nova.compute.manager [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] No waiting events found dispatching network-vif-plugged-febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1330.137546] env[62730]: WARNING nova.compute.manager [req-7932a7d2-ace4-4e47-b471-18deecc2087e req-772d25b8-6ba0-4e2f-a0e6-4bc099fff9f9 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Received unexpected event network-vif-plugged-febcf67a-b6f8-4eda-8772-62d951794f0e for instance with vm_state building and task_state spawning. 
[ 1330.278676] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Successfully updated port: febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1330.293845] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.294018] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.294182] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1330.362870] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1331.059788] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Updating instance_info_cache with network_info: [{"id": "febcf67a-b6f8-4eda-8772-62d951794f0e", "address": "fa:16:3e:89:be:1a", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebcf67a-b6", "ovs_interfaceid": "febcf67a-b6f8-4eda-8772-62d951794f0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.080165] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.080786] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance network_info: |[{"id": "febcf67a-b6f8-4eda-8772-62d951794f0e", "address": "fa:16:3e:89:be:1a", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebcf67a-b6", "ovs_interfaceid": "febcf67a-b6f8-4eda-8772-62d951794f0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.081352] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:be:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c13fd8bc-e797-42fe-94ed-6370d3467a7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'febcf67a-b6f8-4eda-8772-62d951794f0e', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1331.088859] env[62730]: DEBUG oslo.service.loopingcall [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1331.089386] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1331.089633] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbe7ff0f-427d-43fb-a260-ed475e2678bc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.110588] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1331.110588] env[62730]: value = "task-4837181" [ 1331.110588] env[62730]: _type = "Task" [ 1331.110588] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.120013] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837181, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.238574] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Successfully updated port: 05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1331.248910] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.249135] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.249394] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1331.335628] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1331.621091] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837181, 'name': CreateVM_Task, 'duration_secs': 0.321156} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.621287] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1331.621985] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.622178] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.622514] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1331.622810] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-680bf553-85b5-4645-af5d-bb33e4d37269 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.627776] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1331.627776] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52f46b88-b740-e823-4809-12765233557a" [ 1331.627776] env[62730]: _type = "Task" [ 1331.627776] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.636428] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52f46b88-b740-e823-4809-12765233557a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.723843] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Updating instance_info_cache with network_info: [{"id": "05996206-decd-485e-9261-af6ba0e97211", "address": "fa:16:3e:28:44:33", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05996206-de", "ovs_interfaceid": "05996206-decd-485e-9261-af6ba0e97211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.733527] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.736103] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.736103] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance network_info: |[{"id": "05996206-decd-485e-9261-af6ba0e97211", "address": "fa:16:3e:28:44:33", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap05996206-de", "ovs_interfaceid": "05996206-decd-485e-9261-af6ba0e97211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.736689] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:44:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c13fd8bc-e797-42fe-94ed-6370d3467a7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05996206-decd-485e-9261-af6ba0e97211', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1331.744172] env[62730]: DEBUG oslo.service.loopingcall [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1331.744570] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1331.744795] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d56ccec-2fc9-46fc-946b-c03fde46708e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.765733] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.771847] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1331.771847] env[62730]: value = "task-4837182" [ 1331.771847] env[62730]: _type = "Task" [ 1331.771847] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.780769] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837182, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.139274] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.139541] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1332.139803] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.188072] env[62730]: DEBUG nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Received event network-changed-febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1332.188346] env[62730]: DEBUG nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Refreshing instance network info cache due to event network-changed-febcf67a-b6f8-4eda-8772-62d951794f0e. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1332.188678] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Acquiring lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.189039] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Acquired lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.189244] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Refreshing network info cache for port febcf67a-b6f8-4eda-8772-62d951794f0e {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1332.283300] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837182, 'name': CreateVM_Task, 'duration_secs': 0.320351} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.283493] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1332.284193] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.284362] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.284701] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1332.284971] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53bf37d6-a325-48d2-9676-3dd3d1d7a9a8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.291095] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1332.291095] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]520ea531-8ae4-87f0-ccaf-72c4f8dbea5f" [ 1332.291095] env[62730]: _type = "Task" [ 1332.291095] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.301590] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]520ea531-8ae4-87f0-ccaf-72c4f8dbea5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.553413] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Updated VIF entry in instance network info cache for port febcf67a-b6f8-4eda-8772-62d951794f0e. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1332.553816] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Updating instance_info_cache with network_info: [{"id": "febcf67a-b6f8-4eda-8772-62d951794f0e", "address": "fa:16:3e:89:be:1a", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebcf67a-b6", "ovs_interfaceid": "febcf67a-b6f8-4eda-8772-62d951794f0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.565824] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Releasing lock "refresh_cache-d276dbe7-a0fc-4518-9006-a0d749c07984" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.566096] env[62730]: DEBUG nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Received event network-vif-plugged-05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1332.566291] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Acquiring lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.566494] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.566682] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.566864] env[62730]: DEBUG 
nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] No waiting events found dispatching network-vif-plugged-05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1332.567045] env[62730]: WARNING nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Received unexpected event network-vif-plugged-05996206-decd-485e-9261-af6ba0e97211 for instance with vm_state building and task_state spawning. [ 1332.567217] env[62730]: DEBUG nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Received event network-changed-05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1332.567372] env[62730]: DEBUG nova.compute.manager [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Refreshing instance network info cache due to event network-changed-05996206-decd-485e-9261-af6ba0e97211. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1332.567558] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Acquiring lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1332.567691] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Acquired lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.567853] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Refreshing network info cache for port 05996206-decd-485e-9261-af6ba0e97211 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1332.740363] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.740611] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.802157] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.802366] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None 
req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1332.802550] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.005937] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Updated VIF entry in instance network info cache for port 05996206-decd-485e-9261-af6ba0e97211. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1333.006311] env[62730]: DEBUG nova.network.neutron [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Updating instance_info_cache with network_info: [{"id": "05996206-decd-485e-9261-af6ba0e97211", "address": "fa:16:3e:28:44:33", "network": {"id": "d63def3d-0e47-4260-ada5-c9b2e96ec3c8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-168264789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d775e3135484ed8b81c9d2991f2bedb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c13fd8bc-e797-42fe-94ed-6370d3467a7f", "external-id": "nsx-vlan-transportzone-833", "segmentation_id": 833, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05996206-de", "ovs_interfaceid": "05996206-decd-485e-9261-af6ba0e97211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.017439] env[62730]: DEBUG oslo_concurrency.lockutils [req-97d95efe-9e6c-4d2e-a76c-38ec1768455a req-e7961aa2-f09b-4e28-a6e8-960cca8cafb7 service nova] Releasing lock "refresh_cache-3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.737373] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.737721] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.738080] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.738080] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1336.738080] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1336.763223] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763368] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763495] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763625] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763751] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763874] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.763994] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.764132] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.764252] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.764368] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1336.764486] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1337.519565] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.599635] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "d276dbe7-a0fc-4518-9006-a0d749c07984" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.737127] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.750301] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.750642] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.750764] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.750974] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1337.752095] 
env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47778e64-a215-408e-8751-6b3445cabaa5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.761024] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24eee4e4-895a-4109-b860-e7b2da23869f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.775509] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a52db4-4732-4f8c-9494-830c9b5bc8b9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.783277] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00b3ce7-a36a-4e7b-a067-9670851f268e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.814362] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180534MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1337.814524] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.814727] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.891279] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.891472] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.891624] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.891828] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892011] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892185] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892313] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892434] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892554] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.892672] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1337.907128] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 175517cd-b112-4aa4-87e0-e74c1d9a07fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.919141] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.931038] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.941693] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.954195] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance be7f1a05-96f9-430c-b5ad-13fa1aae685b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.964579] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c0ada899-0ddb-456a-a1f3-097529654318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.975016] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f5f9fdc8-ca89-438e-a710-b3e1dd85f550 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.985531] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1337.985924] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1337.985924] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '65', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '3', 'io_workload': '10', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'num_task_spawning': '1', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'num_proj_93039e316cca49179277828e04a9ce61': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1338.211901] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283a8d82-5c29-4d25-b334-944d65b36fae {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.219762] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2f913c-9d5d-4291-9313-f96818e32861 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.251449] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d025f4a-48c4-4d2d-9cb0-84dd91769542 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.259848] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62ef7ca-dbff-4a59-aa65-6b254bedffd5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.273663] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.283744] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1338.302438] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record 
updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1338.302647] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.488s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.303235] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1342.737822] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1342.738188] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1362.973931] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.973931] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.847984] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.848295] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.246569] env[62730]: WARNING oslo_vmware.rw_handles [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1373.246569] env[62730]: ERROR oslo_vmware.rw_handles [ 1373.247278] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1373.248949] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1373.249264] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Copying Virtual Disk [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/a2093e26-5754-4fc2-b681-cf13d424f99f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1373.249556] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59e404df-9c19-4429-8a9c-33195aebd07b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.257986] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1373.257986] env[62730]: value = "task-4837193" [ 1373.257986] env[62730]: _type = "Task" [ 1373.257986] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.266859] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.770213] env[62730]: DEBUG oslo_vmware.exceptions [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1373.770516] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.771168] env[62730]: ERROR nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1373.771168] env[62730]: Faults: ['InvalidArgument'] [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Traceback (most recent call last): [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] yield resources [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self.driver.spawn(context, instance, image_meta, [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self._fetch_image_if_missing(context, vi) [ 1373.771168] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] image_cache(vi, tmp_image_ds_loc) [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 
22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] vm_util.copy_virtual_disk( [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] session._wait_for_task(vmdk_copy_task) [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return self.wait_for_task(task_ref) [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return evt.wait() [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] result = hub.switch() [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1373.771572] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return self.greenlet.switch() [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self.f(*self.args, **self.kw) [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] raise exceptions.translate_fault(task_info.error) [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Faults: ['InvalidArgument'] [ 1373.771912] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] [ 1373.771912] env[62730]: INFO nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Terminating instance [ 1373.773383] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.773644] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1373.773913] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f26af6e-65c5-4371-8300-94592a0eba50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.776379] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1373.776577] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1373.777355] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de358ff-b281-4692-b037-80da52f99f48 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.784679] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1373.784934] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa8754c5-4eca-45e5-a921-61b3b23cd3fc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.787301] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1373.787477] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1373.788451] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3a990bc-384e-470d-969d-e430ae728f30 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.793633] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1373.793633] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52eeb4d7-94f9-ab33-d67b-802885f64b5f" [ 1373.793633] env[62730]: _type = "Task" [ 1373.793633] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.801774] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52eeb4d7-94f9-ab33-d67b-802885f64b5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.850716] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1373.850955] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1373.851129] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleting the datastore file [datastore2] 22f72732-e5e2-49dc-810a-ab90d7a367a0 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1373.851507] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ef5f251-bce9-498a-bd63-de431d2f0310 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.858092] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1373.858092] env[62730]: value = "task-4837195" [ 1373.858092] env[62730]: _type = "Task" [ 1373.858092] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.865991] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837195, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.304955] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1374.304955] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating directory with path [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.305348] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b48af34-f917-4b97-93ca-10c957edbd7d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.316945] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created directory with path [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.317173] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Fetch image to [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1374.317351] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1374.318216] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad24930-d293-461e-b417-9e5f9f0946c6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.326271] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f935a3f1-e7c8-4095-a701-5adfb2557219 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.336466] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02e109a-0ec3-4cb6-8d1e-1382c33b2a90 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.373237] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d7df51-b152-4b27-956d-936c398246eb {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.383027] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-98045b10-114c-4bdb-9814-6036b9262456 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.384825] env[62730]: DEBUG oslo_vmware.api [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072715} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.384825] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1374.384951] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1374.385109] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1374.385297] env[62730]: INFO nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1374.387452] env[62730]: DEBUG nova.compute.claims [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1374.387659] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.387940] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.411914] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1374.468683] env[62730]: DEBUG oslo_vmware.rw_handles [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1374.530265] env[62730]: DEBUG oslo_vmware.rw_handles [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1374.530475] env[62730]: DEBUG oslo_vmware.rw_handles [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1374.745106] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b3a34b-060f-4896-9864-5537a948cc9d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.753606] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a9f700-8a9e-46e2-9ebd-778e62ce4e28 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.784738] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07372dc-3bdb-48d7-8926-9325172475b8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.794152] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d1f062-c9a8-4d33-8d0d-55fa0a29246a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.809262] env[62730]: DEBUG nova.compute.provider_tree [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.818210] env[62730]: DEBUG nova.scheduler.client.report [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1374.833923] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.445s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.833923] env[62730]: ERROR nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1374.833923] env[62730]: Faults: ['InvalidArgument'] [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Traceback (most recent call last): [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1374.833923] env[62730]: ERROR 
nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self.driver.spawn(context, instance, image_meta, [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1374.833923] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self._fetch_image_if_missing(context, vi) [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] image_cache(vi, tmp_image_ds_loc) [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] vm_util.copy_virtual_disk( [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] session._wait_for_task(vmdk_copy_task) [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return self.wait_for_task(task_ref) [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return evt.wait() [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] result = hub.switch() [ 1374.834282] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] return self.greenlet.switch() [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] self.f(*self.args, **self.kw) [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] raise exceptions.translate_fault(task_info.error) [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Faults: ['InvalidArgument'] [ 1374.834625] env[62730]: ERROR nova.compute.manager [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] [ 1374.834625] env[62730]: DEBUG nova.compute.utils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1374.836014] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Build of instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 was re-scheduled: A specified parameter was not correct: fileType [ 1374.836014] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1374.836438] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1374.836618] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1374.836795] env[62730]: DEBUG nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1374.836965] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1375.176462] env[62730]: DEBUG nova.network.neutron [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.191069] env[62730]: INFO nova.compute.manager [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Took 0.35 seconds to deallocate network for instance. [ 1375.306048] env[62730]: INFO nova.scheduler.client.report [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted allocations for instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 [ 1375.331177] env[62730]: DEBUG oslo_concurrency.lockutils [None req-733dc369-74e6-4f58-aa92-982cd0fa9fe9 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.757s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.332464] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.542s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.333254] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.333254] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.333254] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.335209] env[62730]: INFO nova.compute.manager [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Terminating instance [ 1375.337128] env[62730]: DEBUG nova.compute.manager [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1375.337335] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1375.338288] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b69551e4-f7e1-45d2-8e16-44e6b69ae494 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.351051] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73eef09-cd23-4017-8bcc-33c6d92c19e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.368567] env[62730]: DEBUG nova.compute.manager [None req-1f5aca42-0ea5-4ac3-a208-1d8e40fb9005 tempest-ServersAaction247Test-1557020538 tempest-ServersAaction247Test-1557020538-project-member] [instance: 175517cd-b112-4aa4-87e0-e74c1d9a07fe] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1375.395393] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 22f72732-e5e2-49dc-810a-ab90d7a367a0 could not be found. [ 1375.395573] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1375.395759] env[62730]: INFO nova.compute.manager [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 1375.396027] env[62730]: DEBUG oslo.service.loopingcall [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.396280] env[62730]: DEBUG nova.compute.manager [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1375.396379] env[62730]: DEBUG nova.network.neutron [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1375.399463] env[62730]: DEBUG nova.compute.manager [None req-1f5aca42-0ea5-4ac3-a208-1d8e40fb9005 tempest-ServersAaction247Test-1557020538 tempest-ServersAaction247Test-1557020538-project-member] [instance: 175517cd-b112-4aa4-87e0-e74c1d9a07fe] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1375.421656] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1f5aca42-0ea5-4ac3-a208-1d8e40fb9005 tempest-ServersAaction247Test-1557020538 tempest-ServersAaction247Test-1557020538-project-member] Lock "175517cd-b112-4aa4-87e0-e74c1d9a07fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.402s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.431429] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1375.443177] env[62730]: DEBUG nova.network.neutron [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.454552] env[62730]: INFO nova.compute.manager [-] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] Took 0.06 seconds to deallocate network for instance. 
[ 1375.494139] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.494411] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.496020] env[62730]: INFO nova.compute.claims [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1375.570566] env[62730]: DEBUG oslo_concurrency.lockutils [None req-fd5db39a-c6c3-4a93-9acb-c9e5b2ae3c53 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.238s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.571847] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 281.520s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.572679] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 22f72732-e5e2-49dc-810a-ab90d7a367a0] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1375.572679] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "22f72732-e5e2-49dc-810a-ab90d7a367a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.810467] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3611a346-a2b4-403f-a368-ff7d8205f352 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.818450] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ae12ef-ce77-404f-bf96-052592057ce7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.849215] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564c180b-0c61-4e3c-853f-acc1b7cb0d40 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.857248] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c5a129-287e-4eba-92b5-e6af0f8e5954 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.870805] env[62730]: DEBUG nova.compute.provider_tree [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.880123] env[62730]: DEBUG nova.scheduler.client.report [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1375.898835] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.404s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.899539] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1375.939398] env[62730]: DEBUG nova.compute.utils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.940747] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1375.940923] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1375.950848] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1376.020718] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1376.048121] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.048121] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.048121] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.048292] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.048292] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.048292] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.048292] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.048624] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.048957] 
env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.049283] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.049580] env[62730]: DEBUG nova.virt.hardware [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.050573] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd43c5b-5ffa-41c3-a2bb-fb9a2820f4c6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.058941] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5125a2-6b80-4088-ba69-65a855e327a3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.261826] env[62730]: DEBUG nova.policy [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7956b9bdc3d411ba310f4114dd15039', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39999c4fd29e4266ac76cfbe0c95df4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1376.786991] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Successfully created port: bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1377.564150] env[62730]: DEBUG nova.compute.manager [req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Received event network-vif-plugged-bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1377.564412] env[62730]: DEBUG oslo_concurrency.lockutils [req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] Acquiring lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.564627] env[62730]: DEBUG oslo_concurrency.lockutils 
[req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.564798] env[62730]: DEBUG oslo_concurrency.lockutils [req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.565168] env[62730]: DEBUG nova.compute.manager [req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] No waiting events found dispatching network-vif-plugged-bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1377.565419] env[62730]: WARNING nova.compute.manager [req-e45af9ae-64d5-4bc1-ae4d-802e790fdc6b req-859b0330-40e6-4114-b0b8-9a109f75439d service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Received unexpected event network-vif-plugged-bebaf9d9-6be7-49c1-a26a-6628cc301ac2 for instance with vm_state building and task_state spawning. [ 1377.679104] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Successfully updated port: bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1377.700366] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.700562] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquired lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.700719] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1377.779147] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1378.065723] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Updating instance_info_cache with network_info: [{"id": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "address": "fa:16:3e:94:16:25", "network": {"id": "1d6596a5-15bb-4a37-b835-85115e0c79d1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-964634354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39999c4fd29e4266ac76cfbe0c95df4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbebaf9d9-6b", "ovs_interfaceid": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.083928] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Releasing lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.084265] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance network_info: |[{"id": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "address": "fa:16:3e:94:16:25", "network": {"id": "1d6596a5-15bb-4a37-b835-85115e0c79d1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-964634354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39999c4fd29e4266ac76cfbe0c95df4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbebaf9d9-6b", "ovs_interfaceid": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1378.084683] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:16:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bebaf9d9-6be7-49c1-a26a-6628cc301ac2', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1378.092449] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Creating folder: Project (39999c4fd29e4266ac76cfbe0c95df4d). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1378.093053] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c86a7bfb-d996-4b97-992a-3678568f51d5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.104308] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Created folder: Project (39999c4fd29e4266ac76cfbe0c95df4d) in parent group-v942928. [ 1378.104511] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Creating folder: Instances. Parent ref: group-v943008. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1378.104832] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8f57e05-366d-4c5c-84bf-50eff53bb3f3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.114046] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Created folder: Instances in parent group-v943008. [ 1378.114396] env[62730]: DEBUG oslo.service.loopingcall [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.114594] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1378.114834] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fff2d1f-5d44-49fe-8d17-af05ac27dbd6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.134381] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1378.134381] env[62730]: value = "task-4837198" [ 1378.134381] env[62730]: _type = "Task" [ 1378.134381] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.142383] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837198, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.644599] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837198, 'name': CreateVM_Task, 'duration_secs': 0.348016} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.644832] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1378.645481] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.645655] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.645991] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1378.646266] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b69f787b-5d70-4429-8a00-87e69b9ad99b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.651613] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for the task: (returnval){ [ 1378.651613] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52590a19-767f-27e6-028c-004599b6f66a" [ 1378.651613] env[62730]: _type = "Task" [ 1378.651613] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.662722] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52590a19-767f-27e6-028c-004599b6f66a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.162537] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.162908] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.163019] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.595296] env[62730]: DEBUG nova.compute.manager [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Received event network-changed-bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1379.595510] env[62730]: DEBUG nova.compute.manager [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Refreshing instance network info cache due to event network-changed-bebaf9d9-6be7-49c1-a26a-6628cc301ac2. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1379.595739] env[62730]: DEBUG oslo_concurrency.lockutils [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] Acquiring lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.595882] env[62730]: DEBUG oslo_concurrency.lockutils [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] Acquired lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.596055] env[62730]: DEBUG nova.network.neutron [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Refreshing network info cache for port bebaf9d9-6be7-49c1-a26a-6628cc301ac2 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1379.955045] env[62730]: DEBUG nova.network.neutron [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Updated VIF entry in instance network info cache for port bebaf9d9-6be7-49c1-a26a-6628cc301ac2. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1379.955316] env[62730]: DEBUG nova.network.neutron [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Updating instance_info_cache with network_info: [{"id": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "address": "fa:16:3e:94:16:25", "network": {"id": "1d6596a5-15bb-4a37-b835-85115e0c79d1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-964634354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39999c4fd29e4266ac76cfbe0c95df4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbebaf9d9-6b", "ovs_interfaceid": "bebaf9d9-6be7-49c1-a26a-6628cc301ac2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.966500] env[62730]: DEBUG oslo_concurrency.lockutils [req-83d8352d-18ef-459a-b7cf-a0c8a226c005 req-70403ab9-bab0-4a7f-a127-6c20aff58228 service nova] Releasing lock "refresh_cache-9c36edef-9792-4f26-88c0-94a07eb1f588" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.595453] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] 
Acquiring lock "9c36edef-9792-4f26-88c0-94a07eb1f588" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.774926] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.775271] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.168668] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.168668] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.737739] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.745853] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.737823] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.733577] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.737286] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.737536] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.737688] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1395.747640] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.737878] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.750209] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.750523] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.750724] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.750884] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1397.752062] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0025d6a2-c766-450b-9b42-bd1ecae1f75e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.761236] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7567cb2b-b7cc-41aa-9283-7d9f1488a9e8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.776766] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5051dfa5-fc6b-4229-a2ce-5c1fb35e4df7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.783704] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1f16fd30-024e-4f71-a175-1198d05bdc2f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.813096] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180544MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1397.813283] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.813484] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.958639] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 01a34662-fef9-4855-ba3c-39184982fd0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.958824] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.958992] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959155] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959304] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959560] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959560] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959668] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959771] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.959884] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1397.972073] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1397.983106] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance be7f1a05-96f9-430c-b5ad-13fa1aae685b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1397.994057] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c0ada899-0ddb-456a-a1f3-097529654318 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.004753] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f5f9fdc8-ca89-438e-a710-b3e1dd85f550 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.016317] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.027511] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 876523eb-d8f4-4e0a-b9c2-2d9c074e6817 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.037867] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.047580] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.057437] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1398.057669] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1398.057830] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '67', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'io_workload': '10', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'num_task_spawning': '1', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1398.077060] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1398.094400] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1398.094555] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1398.105896] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1398.124427] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing trait associations for resource provider 
5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1398.379518] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7c802d-185f-4c8c-93cd-ac2c17707bcd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.388654] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ebe482-20b7-4b45-ad7d-5c9af1a9a5a8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.419483] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3b8ddc-b86f-4233-9be6-9cdac930defc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.427788] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba0ae87-1de7-40bb-b769-65578ae8d0ea {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.441365] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.450458] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1398.466629] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1398.466871] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.653s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.467636] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.467928] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1399.467966] env[62730]: DEBUG nova.compute.manager 
[None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1399.491769] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.491769] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.491946] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.492100] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493023] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493023] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493023] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493023] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493023] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493398] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1399.493398] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1399.493495] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1401.463393] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ea7314b7-4708-454e-81c3-16f016f1ef10 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "2a15c7f4-16ec-4238-ac95-8de298292584" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.463833] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ea7314b7-4708-454e-81c3-16f016f1ef10 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "2a15c7f4-16ec-4238-ac95-8de298292584" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.726556] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e48bcc2-f791-46b4-8d1f-f1d6acdac595 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Acquiring lock "8b5e2cd3-8cd3-4b78-b4df-72233fb3db57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.727282] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e48bcc2-f791-46b4-8d1f-f1d6acdac595 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock "8b5e2cd3-8cd3-4b78-b4df-72233fb3db57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.737094] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1404.737266] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1408.738520] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.738862] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1408.750208] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 0 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1415.965113] env[62730]: DEBUG oslo_concurrency.lockutils [None req-34b5631b-ff30-43f0-9380-8c09b85ee05f tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "50720565-689e-45e1-a17f-d4673844d6ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.965498] env[62730]: DEBUG oslo_concurrency.lockutils [None req-34b5631b-ff30-43f0-9380-8c09b85ee05f tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "50720565-689e-45e1-a17f-d4673844d6ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.468796] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3fd328a7-da28-453d-9647-6a7611d25ba6 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Acquiring lock "16e44e55-0d5c-407b-8a1f-b1ba0ed61dac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.469034] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3fd328a7-da28-453d-9647-6a7611d25ba6 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "16e44e55-0d5c-407b-8a1f-b1ba0ed61dac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.895317] env[62730]: WARNING oslo_vmware.rw_handles [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1421.895317] env[62730]: ERROR 
oslo_vmware.rw_handles response.begin() [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1421.895317] env[62730]: ERROR oslo_vmware.rw_handles [ 1421.895988] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1421.897575] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1421.897832] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Copying Virtual Disk [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/11c66110-2d73-43f4-a78b-468ef023dc14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1421.898137] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7637fd54-e9c4-4347-a3c5-5e4cc098d8dc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.906438] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1421.906438] env[62730]: value = "task-4837199" [ 1421.906438] env[62730]: _type = "Task" [ 1421.906438] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.914762] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.416883] env[62730]: DEBUG oslo_vmware.exceptions [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1422.417166] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.417709] env[62730]: ERROR nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1422.417709] env[62730]: Faults: ['InvalidArgument'] [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Traceback (most recent call last): [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] yield resources [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self.driver.spawn(context, instance, image_meta, [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self._fetch_image_if_missing(context, vi) [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1422.417709] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] image_cache(vi, tmp_image_ds_loc) [ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] vm_util.copy_virtual_disk( [ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] session._wait_for_task(vmdk_copy_task) [ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return self.wait_for_task(task_ref)
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return evt.wait()
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] result = hub.switch()
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return self.greenlet.switch()
[ 1422.418084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self.f(*self.args, **self.kw)
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] raise exceptions.translate_fault(task_info.error)
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Faults: ['InvalidArgument']
[ 1422.418395] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e]
[ 1422.418395] env[62730]: INFO nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Terminating instance
[ 1422.419650] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1422.419862] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1422.420529] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1422.420729] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1422.420963] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99218ec0-87a5-4cff-ae98-2c11ac4cd2d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.423276] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4154f6d-1272-44db-9d1b-bc2e4fd9031d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.430789] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1422.431068] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a09c6b2d-7a48-4f42-a7d6-532a781a5c50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.433489] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1422.433666] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1422.434776] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b89361b2-ebd4-41bd-9d0d-c8784e1d9728 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.440186] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting for the task: (returnval){
[ 1422.440186] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d5343e-3741-5229-6de6-99cde8daa8f1"
[ 1422.440186] env[62730]: _type = "Task"
[ 1422.440186] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1422.449243] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d5343e-3741-5229-6de6-99cde8daa8f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1422.505441] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1422.505644] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1422.505833] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleting the datastore file [datastore2] 01a34662-fef9-4855-ba3c-39184982fd0e {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1422.506127] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e41e79e9-2b3d-40db-b952-322432897640 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.513221] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){
[ 1422.513221] env[62730]: value = "task-4837201"
[ 1422.513221] env[62730]: _type = "Task"
[ 1422.513221] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1422.521379] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837201, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1422.951545] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1422.951906] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Creating directory with path [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1422.952036] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85f81caa-8212-454b-bb13-2e8147ec1e11 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.967755] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Created directory with path [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1422.970019] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Fetch image to [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1422.970019] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1422.970019] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d534740-4544-4121-9929-e99e371af578 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.979610] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed3c11c-8c4a-4174-9179-3b211f977d11 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1422.992185] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a08fc6-4de7-47d7-9557-19508607720d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.032220] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b364c59-8d87-4b85-9b2a-32c289d38ebe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.042260] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d5ffd684-ca5b-4ad0-8566-0ea1e38dbf77 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.044746] env[62730]: DEBUG oslo_vmware.api [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077502} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1423.045019] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1423.045215] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1423.045391] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1423.045568] env[62730]: INFO nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Took 0.62 seconds to destroy the instance on the hypervisor.
[ 1423.048318] env[62730]: DEBUG nova.compute.claims [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1423.048318] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1423.048465] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1423.093330] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1423.161406] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1423.231436] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1423.231613] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1423.446186] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb59ce01-903b-4eb0-a358-341030456580 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.454493] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae8a77e-35f5-4c44-991f-b7b8294b066d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.487542] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64b5188-9214-455e-a3d9-2b3cb43b1552 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.495863] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b26d644-9160-4dea-ac59-15ca1e55bfef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1423.510447] env[62730]: DEBUG nova.compute.provider_tree [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1423.519789] env[62730]: DEBUG nova.scheduler.client.report [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1423.535715] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.487s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1423.536084] env[62730]: ERROR nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1423.536084] env[62730]: Faults: ['InvalidArgument']
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Traceback (most recent call last):
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self.driver.spawn(context, instance, image_meta,
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self._fetch_image_if_missing(context, vi)
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] image_cache(vi, tmp_image_ds_loc)
[ 1423.536084] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] vm_util.copy_virtual_disk(
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] session._wait_for_task(vmdk_copy_task)
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return self.wait_for_task(task_ref)
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return evt.wait()
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] result = hub.switch()
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] return self.greenlet.switch()
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1423.536419] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] self.f(*self.args, **self.kw)
[ 1423.536717] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1423.536717] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] raise exceptions.translate_fault(task_info.error)
[ 1423.536717] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1423.536717] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Faults: ['InvalidArgument']
[ 1423.536717] env[62730]: ERROR nova.compute.manager [instance: 01a34662-fef9-4855-ba3c-39184982fd0e]
[ 1423.536837] env[62730]: DEBUG nova.compute.utils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1423.538492] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Build of instance 01a34662-fef9-4855-ba3c-39184982fd0e was re-scheduled: A specified parameter was not correct: fileType
[ 1423.538492] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1423.538877] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1423.539068] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1423.539250] env[62730]: DEBUG nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1423.539442] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1423.957218] env[62730]: DEBUG nova.network.neutron [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1423.971881] env[62730]: INFO nova.compute.manager [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Took 0.43 seconds to deallocate network for instance.
[ 1424.106457] env[62730]: INFO nova.scheduler.client.report [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleted allocations for instance 01a34662-fef9-4855-ba3c-39184982fd0e
[ 1424.133161] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e545ca2-6739-48a4-9111-038676987c99 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.046s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.134419] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.942s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1424.134650] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1424.134859] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1424.135045] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.137736] env[62730]: INFO nova.compute.manager [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Terminating instance
[ 1424.143784] env[62730]: DEBUG nova.compute.manager [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1424.143999] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1424.144381] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15ff468b-af20-4219-b33d-0b2dafb8e468 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.149680] env[62730]: DEBUG nova.compute.manager [None req-a9e2d498-536a-47b9-9670-523b109063a6 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.156733] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd55604-403a-4f8b-974f-18528f1efe22 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.178385] env[62730]: DEBUG nova.compute.manager [None req-a9e2d498-536a-47b9-9670-523b109063a6 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1424.195894] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01a34662-fef9-4855-ba3c-39184982fd0e could not be found.
[ 1424.195894] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1424.196029] env[62730]: INFO nova.compute.manager [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1424.196274] env[62730]: DEBUG oslo.service.loopingcall [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1424.196514] env[62730]: DEBUG nova.compute.manager [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1424.196613] env[62730]: DEBUG nova.network.neutron [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1424.210136] env[62730]: DEBUG oslo_concurrency.lockutils [None req-a9e2d498-536a-47b9-9670-523b109063a6 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "58319687-e5ed-41ba-bfa9-bf7e9b6f6bd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.920s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.220324] env[62730]: DEBUG nova.compute.manager [None req-d4987e11-0b5e-4331-9378-00c1af7e24b2 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.234554] env[62730]: DEBUG nova.network.neutron [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1424.248734] env[62730]: DEBUG nova.compute.manager [None req-d4987e11-0b5e-4331-9378-00c1af7e24b2 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 07bb9890-0ebe-4ce3-98b9-2fe35a9a6796] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1424.250457] env[62730]: INFO nova.compute.manager [-] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] Took 0.05 seconds to deallocate network for instance.
[ 1424.272179] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d4987e11-0b5e-4331-9378-00c1af7e24b2 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "07bb9890-0ebe-4ce3-98b9-2fe35a9a6796" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.528s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.283719] env[62730]: DEBUG nova.compute.manager [None req-6e18dc7e-cc53-4a77-a713-45e7c5102144 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: be7f1a05-96f9-430c-b5ad-13fa1aae685b] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.318305] env[62730]: DEBUG nova.compute.manager [None req-6e18dc7e-cc53-4a77-a713-45e7c5102144 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: be7f1a05-96f9-430c-b5ad-13fa1aae685b] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1424.341202] env[62730]: DEBUG oslo_concurrency.lockutils [None req-6e18dc7e-cc53-4a77-a713-45e7c5102144 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock "be7f1a05-96f9-430c-b5ad-13fa1aae685b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.524s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.363011] env[62730]: DEBUG nova.compute.manager [None req-8f30016e-2035-4661-9031-4df05342c015 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c0ada899-0ddb-456a-a1f3-097529654318] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.399260] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ae63d236-a373-4190-94bf-8d5008507489 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.265s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.400343] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 330.348s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1424.400591] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 01a34662-fef9-4855-ba3c-39184982fd0e] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1424.400806] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "01a34662-fef9-4855-ba3c-39184982fd0e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.407110] env[62730]: DEBUG nova.compute.manager [None req-8f30016e-2035-4661-9031-4df05342c015 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c0ada899-0ddb-456a-a1f3-097529654318] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1424.429562] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8f30016e-2035-4661-9031-4df05342c015 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c0ada899-0ddb-456a-a1f3-097529654318" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.898s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.447103] env[62730]: DEBUG nova.compute.manager [None req-762028c6-c8c5-45fc-928c-3a23315267e5 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: f5f9fdc8-ca89-438e-a710-b3e1dd85f550] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.472817] env[62730]: DEBUG nova.compute.manager [None req-762028c6-c8c5-45fc-928c-3a23315267e5 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: f5f9fdc8-ca89-438e-a710-b3e1dd85f550] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1424.493445] env[62730]: DEBUG oslo_concurrency.lockutils [None req-762028c6-c8c5-45fc-928c-3a23315267e5 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "f5f9fdc8-ca89-438e-a710-b3e1dd85f550" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.429s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.503530] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1424.558912] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1424.559201] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1424.560964] env[62730]: INFO nova.compute.claims [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1424.877054] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26275abb-458a-4385-9f1f-d72c8f4fba60 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.885205] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b47877a-3aa6-4822-868d-4569f0e5350d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.917372] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502ce4ad-25e0-4783-a932-ad1c3aa7eb29 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.925511] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4034343c-0b8b-4ed9-8c78-8a19ef0f6569 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1424.939225] env[62730]: DEBUG nova.compute.provider_tree [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1424.949346] env[62730]: DEBUG nova.scheduler.client.report [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1424.963681] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.404s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1424.964194] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1425.002441] env[62730]: DEBUG nova.compute.utils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1425.003993] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1425.004191] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1425.013746] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1425.072855] env[62730]: DEBUG nova.policy [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ec99cdad2049af9366a48c20c37e53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '642da990c34d4a64be9ab53e87990e8a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1425.080326] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1425.107420] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1425.107728] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1425.107824] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1425.108010] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1425.108171] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1425.108324] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1425.108538] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1425.108712] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1425.108885] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1425.109071] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1425.109256] env[62730]: DEBUG nova.virt.hardware [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1425.110183] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08bc98da-3da2-4a18-8f06-92694e1cfd18 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1425.118437] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919dacf2-1083-49dd-a1f2-c8843c1e9be0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1425.549100] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Successfully created port: f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1426.240695] env[62730]: DEBUG nova.compute.manager [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Received event network-vif-plugged-f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1426.240997] env[62730]: DEBUG oslo_concurrency.lockutils [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] Acquiring lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1426.241164] env[62730]: DEBUG oslo_concurrency.lockutils [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1426.241343] env[62730]: DEBUG oslo_concurrency.lockutils [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1426.241517] env[62730]: DEBUG nova.compute.manager [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] No waiting events found dispatching network-vif-plugged-f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1426.241719] env[62730]: WARNING nova.compute.manager [req-b735c79b-204d-4083-b80c-b1fe79cbfa0d req-080fc44f-bcef-49ee-a51e-1a4454ba7bd1 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Received unexpected event network-vif-plugged-f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea for instance with vm_state building and task_state spawning.
[ 1426.348099] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Successfully updated port: f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1426.359465] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1426.359604] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquired lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1426.359754] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1426.422907] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1426.802281] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Updating instance_info_cache with network_info: [{"id": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "address": "fa:16:3e:95:09:e9", "network": {"id": "3656a622-8af4-4d35-ad78-508fac46e0a4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1129549259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "642da990c34d4a64be9ab53e87990e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e044cfd4-1b0d-4d88-b1bd-604025731d3f", "external-id": "nsx-vlan-transportzone-372", "segmentation_id": 372, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf796e0fb-c5", "ovs_interfaceid": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1426.815103] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Releasing lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1426.815420] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance network_info: |[{"id": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "address": "fa:16:3e:95:09:e9", "network": {"id": "3656a622-8af4-4d35-ad78-508fac46e0a4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1129549259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "642da990c34d4a64be9ab53e87990e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e044cfd4-1b0d-4d88-b1bd-604025731d3f", "external-id": "nsx-vlan-transportzone-372", "segmentation_id": 372, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf796e0fb-c5", "ovs_interfaceid": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1426.815831] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:09:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e044cfd4-1b0d-4d88-b1bd-604025731d3f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1426.823901] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Creating folder: Project (642da990c34d4a64be9ab53e87990e8a). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1426.824599] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a7d305a-2bd7-4b94-b402-34a5f0ee13ac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.838202] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Created folder: Project (642da990c34d4a64be9ab53e87990e8a) in parent group-v942928.
[ 1426.838485] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Creating folder: Instances. Parent ref: group-v943011. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1426.838742] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e60f52a-58c5-4f43-a2e5-d417f3ae58e1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.849511] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Created folder: Instances in parent group-v943011.
[ 1426.849784] env[62730]: DEBUG oslo.service.loopingcall [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1426.850037] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1426.850239] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37b757c4-5b28-4a07-9b8a-1c2f1afbdb22 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1426.870781] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1426.870781] env[62730]: value = "task-4837204"
[ 1426.870781] env[62730]: _type = "Task"
[ 1426.870781] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1426.879176] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837204, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1427.381369] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837204, 'name': CreateVM_Task, 'duration_secs': 0.303004} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1427.381761] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1427.382272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1427.382442] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1427.382772] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1427.383046] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c1f8c1-3257-4713-abfb-9d7a2a2255ba {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1427.388075] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for the task: (returnval){
[ 1427.388075] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5299e4a0-5746-a8f5-b8f8-c4954d3cba3b"
[ 1427.388075] env[62730]: _type = "Task"
[ 1427.388075] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1427.396391] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5299e4a0-5746-a8f5-b8f8-c4954d3cba3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1427.900779] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1427.901151] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1427.901509] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1428.268271] env[62730]: DEBUG nova.compute.manager [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Received event network-changed-f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}}
[ 1428.268473] env[62730]: DEBUG nova.compute.manager [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Refreshing instance network info cache due to event network-changed-f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea.
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1428.268682] env[62730]: DEBUG oslo_concurrency.lockutils [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] Acquiring lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.268828] env[62730]: DEBUG oslo_concurrency.lockutils [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] Acquired lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.268989] env[62730]: DEBUG nova.network.neutron [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Refreshing network info cache for port f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1428.569995] env[62730]: DEBUG nova.network.neutron [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Updated VIF entry in instance network info cache for port f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1428.570411] env[62730]: DEBUG nova.network.neutron [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Updating instance_info_cache with network_info: [{"id": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "address": "fa:16:3e:95:09:e9", "network": {"id": "3656a622-8af4-4d35-ad78-508fac46e0a4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1129549259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "642da990c34d4a64be9ab53e87990e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e044cfd4-1b0d-4d88-b1bd-604025731d3f", "external-id": "nsx-vlan-transportzone-372", "segmentation_id": 372, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf796e0fb-c5", "ovs_interfaceid": "f796e0fb-c5f5-4b3a-a3c2-50a6aa803fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.579999] env[62730]: DEBUG oslo_concurrency.lockutils [req-71ce2b99-963e-4b47-b435-0c6460d9c93c req-73a8534a-ba8d-4155-8d43-140abac448b4 service nova] Releasing lock "refresh_cache-ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.668276] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 
tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.749335] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.733487] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.738057] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.738430] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.738430] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.737795] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.738069] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.751625] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.751937] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.752186] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.752404] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None
None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1457.753925] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed56cad-99da-420e-a722-7f4f3f98c6f0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.764055] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03585a06-1aea-4055-bade-47634cc7e249 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.779292] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8edaa18-3157-40f8-adc0-81e181440585 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.786707] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4745df0-cffb-4eb7-a87e-b846885c01d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.817254] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180525MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1457.817468] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.817641] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.893681] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.893864] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.893995] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894140] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894262] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894383] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894504] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894624] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894741] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.894858] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1457.907174] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 876523eb-d8f4-4e0a-b9c2-2d9c074e6817 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.918420] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.928997] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.940108] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.950925] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a15c7f4-16ec-4238-ac95-8de298292584 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.964029] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8b5e2cd3-8cd3-4b78-b4df-72233fb3db57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1457.975148] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 50720565-689e-45e1-a17f-d4673844d6ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1458.007115] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16e44e55-0d5c-407b-8a1f-b1ba0ed61dac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1458.007344] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1458.007451] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '73', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_c54046535dc74172a58cc8e350f2d88d': '1', 'io_workload': '10', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'num_task_spawning': '1', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1458.242634] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8245acb-11aa-4781-96e6-70e2df802389 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.250837] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3b0b4a-f069-48f1-804d-7d4b072d2d6f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.282016] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca114e63-196a-4c5e-884f-a56423bdfc77 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.290105] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c70184-1731-4349-a238-69631ca38e52 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.303636] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.312217] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1458.327657] env[62730]: DEBUG nova.compute.resource_tracker [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1458.327725] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.510s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.327704] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1460.327950] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1460.327950] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1460.347330] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348112] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348112] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348112] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348286] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348286] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348405] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348538] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348707] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.348879] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1460.349022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1460.349573] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.737659] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.738135] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1471.144056] env[62730]: WARNING oslo_vmware.rw_handles [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1471.144056] env[62730]: ERROR oslo_vmware.rw_handles [ 1471.144711] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1471.146369] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1471.146705] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Copying Virtual Disk [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/0a4f0379-6bd0-4ead-baaa-d4dc8e453b14/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1471.147074] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9275672d-f75f-431c-8f13-9b9c8989b747 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.155189] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting 
for the task: (returnval){ [ 1471.155189] env[62730]: value = "task-4837205" [ 1471.155189] env[62730]: _type = "Task" [ 1471.155189] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.163743] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Task: {'id': task-4837205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.666316] env[62730]: DEBUG oslo_vmware.exceptions [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1471.666683] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.667299] env[62730]: ERROR nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1471.667299] env[62730]: Faults: ['InvalidArgument'] [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Traceback (most recent call last): [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] yield resources [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self.driver.spawn(context, instance, image_meta, [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self._fetch_image_if_missing(context, vi) [ 1471.667299] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1471.667643] env[62730]: ERROR nova.compute.manager 
[instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] image_cache(vi, tmp_image_ds_loc) [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] vm_util.copy_virtual_disk( [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] session._wait_for_task(vmdk_copy_task) [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return self.wait_for_task(task_ref) [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return evt.wait() [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] result = hub.switch() [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1471.667643] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return self.greenlet.switch() [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self.f(*self.args, **self.kw) [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] raise exceptions.translate_fault(task_info.error) [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Faults: ['InvalidArgument'] [ 1471.667964] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] [ 1471.667964] env[62730]: INFO nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Terminating instance [ 1471.669356] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b 
tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.669616] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.669862] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41d9417c-96f4-4805-aa33-8a09e044b5c1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.672385] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1471.672576] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1471.673603] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1b7bf6-32ba-4615-96f3-e0795ff40a4b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.680918] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1471.681190] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-527283f7-d308-4137-b8da-fda2c4cf5183 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.683472] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.683646] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1471.684634] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb2be151-9044-48c6-bc54-2c495c1cfecb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.690122] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for the task: (returnval){ [ 1471.690122] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d6dced-4273-63bc-1338-460e488bff56" [ 1471.690122] env[62730]: _type = "Task" [ 1471.690122] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.698029] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d6dced-4273-63bc-1338-460e488bff56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.762430] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1471.762648] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1471.762815] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Deleting the datastore file [datastore2] 4a830a6a-d473-4ae4-858e-2330e42f8c9e {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1471.763107] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fc2f952-a213-4f0a-b2d9-1781c71b8b62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.769717] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting for the task: (returnval){ [ 1471.769717] env[62730]: value = "task-4837207" [ 1471.769717] env[62730]: _type = "Task" [ 1471.769717] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.778013] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Task: {'id': task-4837207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.201840] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1472.202241] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Creating directory with path [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.202395] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1ce7cb8-b66c-4802-acdc-813eb6e862a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.214056] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Created directory with path [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.214251] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Fetch image to [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1472.214431] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1472.215194] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9a2f47-1051-4704-ab25-84bf7515bb38 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.222338] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dd289a-d11f-43c4-892d-2638d9325f11 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.231536] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14108687-b3b7-4b21-bb81-9389aa49fe46 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.263390] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b43a835e-a6f1-43de-ab70-f21fdda98cd4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.270013] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6cc1335c-2584-4d2d-aba0-d477a3420a36 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.279361] env[62730]: DEBUG oslo_vmware.api [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Task: {'id': task-4837207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079353} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.279647] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1472.279838] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1472.280024] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1472.280207] env[62730]: INFO nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1472.282490] env[62730]: DEBUG nova.compute.claims [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1472.282663] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.282878] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.296044] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1472.351448] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1472.411509] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1472.411779] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1472.599636] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedf8198-a8ba-42d9-9170-5bb2e84a1d22 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.607762] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1912043b-78f5-4bf1-a658-db2e066cabfb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.637503] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293b4d3a-0017-4de8-bcc1-3900cfbe6435 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.645469] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022983cd-c620-41c4-9ddd-73708ad8a2ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.658783] env[62730]: DEBUG nova.compute.provider_tree [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.668574] env[62730]: DEBUG nova.scheduler.client.report [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1472.686569] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.403s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.687221] env[62730]: ERROR nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1472.687221] env[62730]: Faults: ['InvalidArgument'] [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Traceback (most recent call last): [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1472.687221] 
env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self.driver.spawn(context, instance, image_meta, [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self._fetch_image_if_missing(context, vi) [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] image_cache(vi, tmp_image_ds_loc) [ 1472.687221] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] vm_util.copy_virtual_disk( [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] session._wait_for_task(vmdk_copy_task) [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return self.wait_for_task(task_ref) [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return evt.wait() [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] result = hub.switch() [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] return self.greenlet.switch() [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1472.687607] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] self.f(*self.args, **self.kw) [ 1472.688046] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1472.688046] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] raise exceptions.translate_fault(task_info.error) [ 1472.688046] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1472.688046] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Faults: ['InvalidArgument'] [ 1472.688046] env[62730]: ERROR nova.compute.manager [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] [ 1472.688046] env[62730]: DEBUG nova.compute.utils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1472.690796] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Build of instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e was re-scheduled: A specified parameter was not correct: fileType [ 1472.690796] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1472.691268] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1472.691463] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1472.691659] env[62730]: DEBUG nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1472.691864] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1473.183269] env[62730]: DEBUG nova.network.neutron [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.193337] env[62730]: INFO nova.compute.manager [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Took 0.50 seconds to deallocate network for instance. [ 1473.297951] env[62730]: INFO nova.scheduler.client.report [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Deleted allocations for instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e [ 1473.322049] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3b4214e5-7f2c-481c-938b-1bd459b33167 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 623.756s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.323421] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 426.913s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.323730] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Acquiring lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.323954] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.324150] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.326912] env[62730]: INFO nova.compute.manager [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Terminating instance [ 1473.329199] env[62730]: DEBUG nova.compute.manager [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1473.329405] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1473.329992] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47de0370-7efd-4715-8980-54fb7998ac0b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.339598] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9133d4d-9482-4235-aaa5-0111da8b6d19 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.350747] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1473.374149] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a830a6a-d473-4ae4-858e-2330e42f8c9e could not be found. 
[ 1473.374362] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1473.374505] env[62730]: INFO nova.compute.manager [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1473.374754] env[62730]: DEBUG oslo.service.loopingcall [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.374997] env[62730]: DEBUG nova.compute.manager [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1473.375109] env[62730]: DEBUG nova.network.neutron [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1473.399721] env[62730]: DEBUG nova.network.neutron [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.406540] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.406785] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.408631] env[62730]: INFO nova.compute.claims [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.413052] env[62730]: INFO nova.compute.manager [-] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] Took 0.04 seconds to deallocate network for instance.
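The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return.") wraps network deallocation in a retrying loop. A rough sketch of that retry shape using the public loopingcall API; the retried body, attempt count, and interval are invented, and Nova's actual implementation differs in detail:

    # Sketch only: retry a callable on a fixed interval until it succeeds.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            pass  # the real code would call the neutron deallocation here
        except Exception:
            if attempts['n'] < 3:
                return  # swallow the error and retry on the next tick
            raise   # give up; wait() below re-raises
        raise loopingcall.LoopingCallDone()  # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2).wait()  # blocks until LoopingCallDone or error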
[ 1473.518719] env[62730]: DEBUG oslo_concurrency.lockutils [None req-4e9bc92b-6537-430d-a5a9-20c4c47ccd85 tempest-InstanceActionsTestJSON-2143266866 tempest-InstanceActionsTestJSON-2143266866-project-member] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.196s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.520086] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 379.467s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.520086] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4a830a6a-d473-4ae4-858e-2330e42f8c9e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1473.520268] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "4a830a6a-d473-4ae4-858e-2330e42f8c9e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.738764] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b665716-330c-4c49-a695-fe469ef85408 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.747089] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e047620d-7a66-471a-82f7-d0ae7aebeff8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.780191] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86337331-f82d-4bf4-a8c0-aa5a8115e98c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.788296] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8baa2f-f4c2-485e-9bf7-99baf201d0e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.802651] env[62730]: DEBUG nova.compute.provider_tree [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.813018] env[62730]: DEBUG nova.scheduler.client.report [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200,
'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.829471] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.423s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.829913] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1473.864185] env[62730]: DEBUG nova.compute.utils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1473.866357] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1473.866541] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1473.880022] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1473.921604] env[62730]: INFO nova.virt.block_device [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Booting with volume 84ea5bd8-e56f-472f-b5f1-baec048c6518 at /dev/sda [ 1473.972669] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f84a36a0-063c-4b58-94e2-22a9b7e0bf21 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.976179] env[62730]: DEBUG nova.policy [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51648ab0f937434e92129440f2281094', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '730d4e05fe37446181d8e5989a1a6c1d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1473.984556] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de753cd7-1c2b-43c8-9907-b7d938d59f71 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.017786] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50a8b299-49a7-444f-916f-8cd28a1fb8c9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.026115] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952f8c43-4594-45ac-9acd-c9b28720cf3e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.056354] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e50a7c2-766c-48f2-9a29-ff0836118f43 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.063547] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6a5f25-6094-4bc8-891c-68fd305302b0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.077664] env[62730]: DEBUG nova.virt.block_device [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updating existing volume attachment record: 507163b8-a8d8-44eb-ac88-58345720f449 {{(pid=62730) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1474.304566] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1474.304566] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1474.304566] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1474.304900] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.304900] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1474.304900] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.305871] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1474.305871] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1474.305871] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1474.306835] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Got 1
possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1474.306835] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1474.306835] env[62730]: DEBUG nova.virt.hardware [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1474.307830] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a5000f-9b89-4717-a7a1-29a3312aea72 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.317162] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96723c37-fcd1-43e5-9b7e-1ee686696b43 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.396666] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Successfully created port: 5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.507212] env[62730]: DEBUG nova.compute.manager [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Received event network-vif-plugged-5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1475.507606] env[62730]: DEBUG oslo_concurrency.lockutils [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] Acquiring lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.507686] env[62730]: DEBUG oslo_concurrency.lockutils [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.507862] env[62730]: DEBUG oslo_concurrency.lockutils [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.508162] env[62730]: DEBUG nova.compute.manager [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] No
waiting events found dispatching network-vif-plugged-5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1475.508396] env[62730]: WARNING nova.compute.manager [req-106ffd1f-2581-4e5d-8bf2-fb7700f977d8 req-1beb146c-7e2e-465d-90e6-c64a8d1fa72b service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Received unexpected event network-vif-plugged-5d5953ba-1516-4007-8b70-ada059092409 for instance with vm_state building and task_state spawning. [ 1475.590886] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Successfully updated port: 5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1475.605957] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.606133] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquired lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.606285] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1475.648896] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1475.831491] env[62730]: DEBUG nova.network.neutron [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updating instance_info_cache with network_info: [{"id": "5d5953ba-1516-4007-8b70-ada059092409", "address": "fa:16:3e:84:f0:f9", "network": {"id": "780c80e6-1355-44f9-b21d-a2669bafdb14", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-386231415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "730d4e05fe37446181d8e5989a1a6c1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5953ba-15", "ovs_interfaceid": "5d5953ba-1516-4007-8b70-ada059092409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.876466] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Releasing lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.876727] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Instance network_info: |[{"id": "5d5953ba-1516-4007-8b70-ada059092409", "address": "fa:16:3e:84:f0:f9", "network": {"id": "780c80e6-1355-44f9-b21d-a2669bafdb14", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-386231415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "730d4e05fe37446181d8e5989a1a6c1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5953ba-15", "ovs_interfaceid": "5d5953ba-1516-4007-8b70-ada059092409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1475.877188] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:f0:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '779b8e65-8b9e-427e-af08-910febd65bfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d5953ba-1516-4007-8b70-ada059092409', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1475.884651] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Creating folder: Project (730d4e05fe37446181d8e5989a1a6c1d). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1475.885237] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96c9d5f6-dd91-4b7a-bc78-1ba526909e53 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.898870] env[62730]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1475.899060] env[62730]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62730) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1475.899395] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Folder already exists: Project (730d4e05fe37446181d8e5989a1a6c1d). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1475.899625] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Creating folder: Instances. Parent ref: group-v943004. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1475.899851] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a89aab4a-5b3d-4dc3-b173-bdbac88fdcb1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.909753] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Created folder: Instances in parent group-v943004. [ 1475.909985] env[62730]: DEBUG oslo.service.loopingcall [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1475.910189] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1475.910389] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f85ffd15-f448-47e0-942c-66ab5174840d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.930120] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1475.930120] env[62730]: value = "task-4837210" [ 1475.930120] env[62730]: _type = "Task" [ 1475.930120] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.937794] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837210, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.440884] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837210, 'name': CreateVM_Task, 'duration_secs': 0.283894} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.441324] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1476.442034] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-943007', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'name': 'volume-84ea5bd8-e56f-472f-b5f1-baec048c6518', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '876523eb-d8f4-4e0a-b9c2-2d9c074e6817', 'attached_at': '', 'detached_at': '', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'serial': '84ea5bd8-e56f-472f-b5f1-baec048c6518'}, 'device_type': None, 'delete_on_termination': True, 'boot_index': 0, 'attachment_id': '507163b8-a8d8-44eb-ac88-58345720f449', 'volume_type': None}], 'swap': None} {{(pid=62730) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1476.442308] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Root volume attach. 
Driver type: vmdk {{(pid=62730) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1476.443109] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd4e450-76b1-472a-88bf-891c6b74d628 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.451840] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1121a2d-8aa5-468a-a8db-ab065bb105aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.458355] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e6fc6e-b793-47b0-a42c-7ec48785b341 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.464704] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b2600526-e73b-4c42-8452-22b265fcc702 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.471646] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1476.471646] env[62730]: value = "task-4837211" [ 1476.471646] env[62730]: _type = "Task" [ 1476.471646] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.479698] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837211, 'name': RelocateVM_Task} progress is 5%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.981704] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837211, 'name': RelocateVM_Task, 'duration_secs': 0.379812} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.982132] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Volume attach. 
Driver type: vmdk {{(pid=62730) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1476.982214] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-943007', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'name': 'volume-84ea5bd8-e56f-472f-b5f1-baec048c6518', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '876523eb-d8f4-4e0a-b9c2-2d9c074e6817', 'attached_at': '', 'detached_at': '', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'serial': '84ea5bd8-e56f-472f-b5f1-baec048c6518'} {{(pid=62730) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1476.982947] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81554fc-93bf-4c9c-9750-967543ae3d81 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.999781] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22dfaf9-cb75-48dc-94d1-fafbf6b7666c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.024260] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] volume-84ea5bd8-e56f-472f-b5f1-baec048c6518/volume-84ea5bd8-e56f-472f-b5f1-baec048c6518.vmdk or device None with type thin {{(pid=62730) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1477.024665] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b1cc461-dcde-4192-807f-a606c91257b4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.048125] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1477.048125] env[62730]: value = "task-4837212" [ 1477.048125] env[62730]: _type = "Task" [ 1477.048125] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.056619] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837212, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.540117] env[62730]: DEBUG nova.compute.manager [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Received event network-changed-5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1477.540323] env[62730]: DEBUG nova.compute.manager [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Refreshing instance network info cache due to event network-changed-5d5953ba-1516-4007-8b70-ada059092409. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1477.540543] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] Acquiring lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.540693] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] Acquired lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.540854] env[62730]: DEBUG nova.network.neutron [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Refreshing network info cache for port 5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1477.560945] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837212, 'name': ReconfigVM_Task, 'duration_secs': 0.322341} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.561373] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Reconfigured VM instance instance-00000054 to attach disk [datastore2] volume-84ea5bd8-e56f-472f-b5f1-baec048c6518/volume-84ea5bd8-e56f-472f-b5f1-baec048c6518.vmdk or device None with type thin {{(pid=62730) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1477.566297] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-685ec3c9-ba2e-4c06-b090-88f335e2e47a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.582285] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1477.582285] env[62730]: value = "task-4837213" [ 1477.582285] env[62730]: _type = "Task" [ 1477.582285] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.594443] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837213, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.961497] env[62730]: DEBUG nova.network.neutron [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updated VIF entry in instance network info cache for port 5d5953ba-1516-4007-8b70-ada059092409. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1477.961983] env[62730]: DEBUG nova.network.neutron [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updating instance_info_cache with network_info: [{"id": "5d5953ba-1516-4007-8b70-ada059092409", "address": "fa:16:3e:84:f0:f9", "network": {"id": "780c80e6-1355-44f9-b21d-a2669bafdb14", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-386231415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "730d4e05fe37446181d8e5989a1a6c1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5953ba-15", "ovs_interfaceid": "5d5953ba-1516-4007-8b70-ada059092409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.972616] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e7dcfd3-c1b3-4888-90eb-0aa11e3380d9 req-babf885f-b9f2-47ea-8b83-4021d67870b1 service nova] Releasing lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.092463] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837213, 'name': ReconfigVM_Task, 'duration_secs': 0.125188} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.092779] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-943007', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'name': 'volume-84ea5bd8-e56f-472f-b5f1-baec048c6518', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '876523eb-d8f4-4e0a-b9c2-2d9c074e6817', 'attached_at': '', 'detached_at': '', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'serial': '84ea5bd8-e56f-472f-b5f1-baec048c6518'} {{(pid=62730) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1478.093412] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-473fcb3e-80db-486d-8467-11347d40c004 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.100512] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1478.100512] env[62730]: value = "task-4837214" [ 1478.100512] env[62730]: _type = "Task" [ 1478.100512] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.113367] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837214, 'name': Rename_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.611312] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837214, 'name': Rename_Task, 'duration_secs': 0.118385} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.611534] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Powering on the VM {{(pid=62730) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1478.611783] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6089186c-db9c-4b50-8b65-37b1be9631f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.617920] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1478.617920] env[62730]: value = "task-4837215" [ 1478.617920] env[62730]: _type = "Task" [ 1478.617920] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.628116] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837215, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.128469] env[62730]: DEBUG oslo_vmware.api [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837215, 'name': PowerOnVM_Task, 'duration_secs': 0.421947} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.128851] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Powered on the VM {{(pid=62730) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1479.129025] env[62730]: INFO nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Took 4.83 seconds to spawn the instance on the hypervisor. [ 1479.129337] env[62730]: DEBUG nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Checking state {{(pid=62730) _get_power_state /opt/stack/nova/nova/compute/manager.py:1783}} [ 1479.130202] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fa102d-7e0b-430c-9920-88ad187499d3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.192995] env[62730]: INFO nova.compute.manager [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Took 5.80 seconds to build instance. [ 1479.207751] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e79ee2bd-3513-42d0-9aea-5270eb3a1fbe tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.234s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.217269] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1479.275584] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.275849] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.277431] env[62730]: INFO nova.compute.claims [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1479.537305] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cde8f2e-30e2-40ea-8b4c-1ccf6359fab4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.545624] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41489751-e52e-4b46-9360-f26a6f52a235 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.576946] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3064c6f-29f5-4990-8a67-59b5c364712a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.584771] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f7ee36-3766-4734-8043-3b0866336e3b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.598822] env[62730]: DEBUG nova.compute.provider_tree [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.607470] env[62730]: DEBUG nova.scheduler.client.report [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1479.621878] env[62730]: DEBUG 
oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.622404] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1479.659099] env[62730]: DEBUG nova.compute.utils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1479.659939] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1479.660124] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1479.671593] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1479.740217] env[62730]: DEBUG nova.policy [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f6f0c96261944aa91e1e3f9806b1025', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca2739fcb8b4c7db333ac9aa362ca50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1479.755537] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1479.782294] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1479.782570] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1479.782697] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1479.782879] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1479.783036] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1479.783195] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1479.783406] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1479.783571] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1479.783747] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1479.783984] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1479.784190] env[62730]: DEBUG nova.virt.hardware [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1479.785051] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cfee97-f3a6-41cb-8a90-152085f157e1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.795297] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53525d46-c3f7-4819-9336-a6f9747fd62a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.452337] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Successfully created port: dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1480.728075] env[62730]: DEBUG nova.compute.manager [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Received event network-changed-5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1480.728326] env[62730]: DEBUG nova.compute.manager [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Refreshing instance network info cache due to event network-changed-5d5953ba-1516-4007-8b70-ada059092409. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1480.728571] env[62730]: DEBUG oslo_concurrency.lockutils [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] Acquiring lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.728730] env[62730]: DEBUG oslo_concurrency.lockutils [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] Acquired lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.728916] env[62730]: DEBUG nova.network.neutron [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Refreshing network info cache for port 5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1481.487204] env[62730]: DEBUG nova.network.neutron [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updated VIF entry in instance network info cache for port 5d5953ba-1516-4007-8b70-ada059092409. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1481.487694] env[62730]: DEBUG nova.network.neutron [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updating instance_info_cache with network_info: [{"id": "5d5953ba-1516-4007-8b70-ada059092409", "address": "fa:16:3e:84:f0:f9", "network": {"id": "780c80e6-1355-44f9-b21d-a2669bafdb14", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-386231415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "730d4e05fe37446181d8e5989a1a6c1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "779b8e65-8b9e-427e-af08-910febd65bfa", "external-id": "nsx-vlan-transportzone-906", "segmentation_id": 906, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5953ba-15", "ovs_interfaceid": "5d5953ba-1516-4007-8b70-ada059092409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.501029] env[62730]: DEBUG oslo_concurrency.lockutils [req-5c8cf6bf-d879-466d-aac5-7c4fa568ff1d req-43d92fa0-5e3a-4596-9136-4e048babd958 service nova] Releasing lock "refresh_cache-876523eb-d8f4-4e0a-b9c2-2d9c074e6817" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.871426] env[62730]: DEBUG nova.compute.manager [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 
req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Received event network-vif-plugged-dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1481.871701] env[62730]: DEBUG oslo_concurrency.lockutils [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] Acquiring lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.871985] env[62730]: DEBUG oslo_concurrency.lockutils [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.872184] env[62730]: DEBUG oslo_concurrency.lockutils [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.872654] env[62730]: DEBUG nova.compute.manager [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] No waiting events found dispatching network-vif-plugged-dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1481.872939] env[62730]: WARNING nova.compute.manager [req-947055dd-6b0b-4a21-923a-775f6a62dcd0 req-04f2b761-73e3-4bb0-81c3-a155c9339d94 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Received unexpected event network-vif-plugged-dc751f04-25af-49fd-ab60-0a563482f9de for instance with vm_state building and task_state spawning. 
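The lock entries above trace Nova's external-event handshake for port dc751f04-25af-49fd-ab60-0a563482f9de: on receiving network-vif-plugged from Neutron, the compute manager takes the per-instance "-events" lock, pops any waiter registered for that event, and, finding none because the instance is still building, logs the event as unexpected instead of failing the spawn. Below is a minimal, self-contained sketch of that pop-or-warn pattern using only the Python standard library; the class and method names are illustrative, not Nova's actual implementation.

import threading

class InstanceEvents:
    """Toy model of the pop-or-warn event dispatch seen in the log."""
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance, event_name) -> threading.Event

    def prepare_for_event(self, instance, event_name):
        # A waiter registers *before* the operation that triggers the event.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance, event_name)] = ev
        return ev

    def pop_event(self, instance, event_name):
        # Called when the external event arrives; returns the waiter, if any.
        with self._lock:
            return self._waiters.pop((instance, event_name), None)

def dispatch(events, instance, event_name):
    waiter = events.pop_event(instance, event_name)
    if waiter is None:
        # Corresponds to "No waiting events found dispatching ..." followed by
        # the WARNING "Received unexpected event ..." in the log above.
        print("WARNING: unexpected event %s for %s" % (event_name, instance))
    else:
        waiter.set()

events = InstanceEvents()
dispatch(events, "6dff3e96", "network-vif-plugged")       # no waiter -> warning path

w = events.prepare_for_event("6dff3e96", "network-vif-plugged")
dispatch(events, "6dff3e96", "network-vif-plugged")       # waiter found -> signalled
print("waiter signalled:", w.is_set())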
[ 1481.957608] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Successfully updated port: dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1481.967045] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.967225] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.967387] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1482.075722] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1482.334733] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Updating instance_info_cache with network_info: [{"id": "dc751f04-25af-49fd-ab60-0a563482f9de", "address": "fa:16:3e:83:ab:d2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc751f04-25", "ovs_interfaceid": "dc751f04-25af-49fd-ab60-0a563482f9de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.347236] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.347560] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance network_info: |[{"id": "dc751f04-25af-49fd-ab60-0a563482f9de", "address": "fa:16:3e:83:ab:d2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc751f04-25", "ovs_interfaceid": "dc751f04-25af-49fd-ab60-0a563482f9de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1482.348100] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:ab:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc751f04-25af-49fd-ab60-0a563482f9de', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1482.356668] env[62730]: DEBUG oslo.service.loopingcall [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.357124] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1482.357360] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25333609-f745-4d97-a306-841d07b46e28 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.379084] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1482.379084] env[62730]: value = "task-4837216" [ 1482.379084] env[62730]: _type = "Task" [ 1482.379084] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.389257] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837216, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.892068] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837216, 'name': CreateVM_Task, 'duration_secs': 0.321615} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.892458] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1482.894153] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.894486] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.894977] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1482.895332] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a7083bd-19d1-4fda-8bbe-9bc5c9fb7a50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.902940] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1482.902940] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]523df052-5d04-2545-3536-42cedda0570f" [ 1482.902940] env[62730]: _type = "Task" [ 1482.902940] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.914317] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]523df052-5d04-2545-3536-42cedda0570f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.415909] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.416190] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1483.416445] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.902317] env[62730]: DEBUG nova.compute.manager [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Received event network-changed-dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1483.902618] env[62730]: DEBUG nova.compute.manager [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Refreshing instance network info cache due to event network-changed-dc751f04-25af-49fd-ab60-0a563482f9de. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1483.902735] env[62730]: DEBUG oslo_concurrency.lockutils [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] Acquiring lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.902882] env[62730]: DEBUG oslo_concurrency.lockutils [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] Acquired lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.903218] env[62730]: DEBUG nova.network.neutron [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Refreshing network info cache for port dc751f04-25af-49fd-ab60-0a563482f9de {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1484.515976] env[62730]: DEBUG nova.network.neutron [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Updated VIF entry in instance network info cache for port dc751f04-25af-49fd-ab60-0a563482f9de. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1484.516382] env[62730]: DEBUG nova.network.neutron [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Updating instance_info_cache with network_info: [{"id": "dc751f04-25af-49fd-ab60-0a563482f9de", "address": "fa:16:3e:83:ab:d2", "network": {"id": "ab9217ac-7102-427d-80be-6901bc91ce6f", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-684736298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca2739fcb8b4c7db333ac9aa362ca50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc751f04-25", "ovs_interfaceid": "dc751f04-25af-49fd-ab60-0a563482f9de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.531694] env[62730]: DEBUG oslo_concurrency.lockutils [req-510b81ba-581b-41b7-95df-196ab1134ef5 req-6e60eaa7-b25d-482f-93a8-d2d315071500 service nova] Releasing lock "refresh_cache-6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.364437] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "842e4145-ba83-48d5-8514-78532381eb2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.364705] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.232341] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.232652] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock 
"876523eb-d8f4-4e0a-b9c2-2d9c074e6817" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.232867] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.233140] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.233364] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.235478] env[62730]: INFO nova.compute.manager [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Terminating instance [ 1499.237796] env[62730]: DEBUG nova.compute.manager [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1499.238060] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Powering off the VM {{(pid=62730) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1499.238555] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90327579-3826-44a3-a73d-030977b1027e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.247620] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1499.247620] env[62730]: value = "task-4837217" [ 1499.247620] env[62730]: _type = "Task" [ 1499.247620] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.256670] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.758383] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837217, 'name': PowerOffVM_Task, 'duration_secs': 0.169779} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.758664] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Powered off the VM {{(pid=62730) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1499.758874] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Volume detach. Driver type: vmdk {{(pid=62730) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1499.759077] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-943007', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'name': 'volume-84ea5bd8-e56f-472f-b5f1-baec048c6518', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '876523eb-d8f4-4e0a-b9c2-2d9c074e6817', 'attached_at': '', 'detached_at': '', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'serial': '84ea5bd8-e56f-472f-b5f1-baec048c6518'} {{(pid=62730) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1499.759932] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4447950-126b-4bc3-97bf-bfc0e8446fff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.778402] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a2adaa-882b-4d1b-bcf8-8faedbc895f8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.787292] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866d14c1-743f-42ff-a66f-52d5dc4f0cf6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.805567] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18631a60-c81e-406d-b5a1-d44346a51440 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.821396] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] The volume has not been displaced from its original location: [datastore2] volume-84ea5bd8-e56f-472f-b5f1-baec048c6518/volume-84ea5bd8-e56f-472f-b5f1-baec048c6518.vmdk. No consolidation needed. {{(pid=62730) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1499.826537] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=62730) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1499.826836] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2b8026a-eed1-4ee2-a624-755212f5c4dc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.845293] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1499.845293] env[62730]: value = "task-4837218" [ 1499.845293] env[62730]: _type = "Task" [ 1499.845293] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.853910] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837218, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.355561] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837218, 'name': ReconfigVM_Task, 'duration_secs': 0.143692} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.355973] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=62730) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1500.361091] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4ead33b-3804-465f-b039-34b426dc3e05 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.376882] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1500.376882] env[62730]: value = "task-4837219" [ 1500.376882] env[62730]: _type = "Task" [ 1500.376882] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.389270] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837219, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.887625] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837219, 'name': ReconfigVM_Task, 'duration_secs': 0.153737} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.887934] env[62730]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-943007', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'name': 'volume-84ea5bd8-e56f-472f-b5f1-baec048c6518', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '876523eb-d8f4-4e0a-b9c2-2d9c074e6817', 'attached_at': '', 'detached_at': '', 'volume_id': '84ea5bd8-e56f-472f-b5f1-baec048c6518', 'serial': '84ea5bd8-e56f-472f-b5f1-baec048c6518'} {{(pid=62730) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1500.888273] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1500.889108] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f703563c-2acf-408f-a7cc-1f81985c18e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.896289] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1500.896521] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e7a5edf-b0b0-4327-b270-0dd030d77866 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.956605] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1500.956847] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] 
[instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1500.957024] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Deleting the datastore file [datastore2] 876523eb-d8f4-4e0a-b9c2-2d9c074e6817 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1500.957295] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9a5de7b-2471-444f-95ee-2e0c78c6f3af {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.964194] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for the task: (returnval){ [ 1500.964194] env[62730]: value = "task-4837221" [ 1500.964194] env[62730]: _type = "Task" [ 1500.964194] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.972122] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.474329] env[62730]: DEBUG oslo_vmware.api [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Task: {'id': task-4837221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081616} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.474779] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1501.474779] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1501.474943] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1501.475145] env[62730]: INFO nova.compute.manager [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Took 2.24 seconds to destroy the instance on the hypervisor. 
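Each task block in the teardown above follows the same polling contract logged by wait_for_task/_poll_task: invoke the vCenter method (PowerOffVM_Task, ReconfigVM_Task, UnregisterVM, DeleteDatastoreFile_Task), then poll the returned task handle, reporting "progress is N%" until it completes and a duration_secs is recorded. Here is a stripped-down sketch of that loop, with a fake task object standing in for the real vSphere TaskInfo; the names are hypothetical, not the oslo.vmware API.

import time

class FakeTask:
    """Stand-in for a vCenter task handle; real code reads TaskInfo via the API."""
    def __init__(self, steps):
        self._progress = iter(steps)
    def poll(self):
        return next(self._progress)       # e.g. 0, 6, then "success"

def wait_for_task(task, interval=0.5):
    # Mirrors the cadence in the log: poll, report progress, stop on success.
    start = time.monotonic()
    while True:
        state = task.poll()
        if state == "success":
            return time.monotonic() - start   # the duration_secs the log reports
        print("progress is %s%%" % state)
        time.sleep(interval)

duration = wait_for_task(FakeTask([0, 6, "success"]), interval=0.01)
print("completed successfully in %.6fs" % duration)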
[ 1501.475390] env[62730]: DEBUG oslo.service.loopingcall [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1501.475616] env[62730]: DEBUG nova.compute.manager [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1501.475721] env[62730]: DEBUG nova.network.neutron [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1502.726192] env[62730]: DEBUG nova.network.neutron [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.746401] env[62730]: INFO nova.compute.manager [-] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Took 1.27 seconds to deallocate network for instance. [ 1502.758446] env[62730]: DEBUG nova.compute.manager [req-31a9ea71-67f9-4459-b6df-759c31478059 req-217681f9-a641-49c7-a072-119fc8537fa2 service nova] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Received event network-vif-deleted-5d5953ba-1516-4007-8b70-ada059092409 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1502.826240] env[62730]: INFO nova.compute.manager [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Took 0.08 seconds to detach 1 volumes for instance.
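The "Acquiring lock ... / acquired ... :: waited Ns / ... "released" ... :: held Ns" records below come from oslo.concurrency's lockutils wrapper around a named lock. A pure-stdlib sketch of that pattern (the helper name and message format are approximations, not the lockutils implementation):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, caller):
        lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, caller))
        start = time.monotonic()
        with lock:
            print('Lock "%s" acquired by "%s" :: waited %.3fs'
                  % (name, caller, time.monotonic() - start))
            held_from = time.monotonic()
            try:
                yield
            finally:
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, caller, time.monotonic() - held_from))

For example, with timed_lock("compute_resources", "ResourceTracker.update_usage"): ... would mirror the resource-tracker records in this section.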
[ 1502.827380] env[62730]: DEBUG nova.compute.manager [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Deleting volume: 84ea5bd8-e56f-472f-b5f1-baec048c6518 {{(pid=62730) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3239}} [ 1502.941949] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.941949] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.941949] env[62730]: DEBUG nova.objects.instance [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lazy-loading 'resources' on Instance uuid 876523eb-d8f4-4e0a-b9c2-2d9c074e6817 {{(pid=62730) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1503.238288] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd52b8b3-4e87-4624-be37-e6654eab8e77 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.246400] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea74da7-c372-45b3-8556-e22d870d7c7e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.280722] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98731726-f95e-44be-929e-216269b73cff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.289948] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16daf557-36b0-443a-b6a5-daed7cd93e32 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.308138] env[62730]: DEBUG nova.compute.provider_tree [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.327045] env[62730]: DEBUG nova.scheduler.client.report [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1503.350035] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.409s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.378149] env[62730]: INFO nova.scheduler.client.report [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Deleted allocations for instance 876523eb-d8f4-4e0a-b9c2-2d9c074e6817 [ 1503.432153] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f22bf01c-6ae1-4815-8e74-3ebc10d0e9b9 tempest-ServersTestBootFromVolume-605577708 tempest-ServersTestBootFromVolume-605577708-project-member] Lock "876523eb-d8f4-4e0a-b9c2-2d9c074e6817" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.199s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.738482] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.738486] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.732607] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.737278] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1517.738098] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1517.750732] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.751079] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.751343] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.751570] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1517.753160] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca0bec1-5386-4d78-a8a8-52b9489de46d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.766708] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5ef1e2-4927-42e7-8b15-91216aad6149 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.787561] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bceea38-128c-4e30-a0be-dbaed929f7e8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.801221] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1856db-68c8-4cc7-9258-3b724bfddd21 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.847017] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180507MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1517.847270] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.847568] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.931879] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932082] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932270] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932380] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932513] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932646] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932802] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.932926] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.933071] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.933208] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1517.946792] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1517.958268] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1517.969030] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a15c7f4-16ec-4238-ac95-8de298292584 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1517.979628] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8b5e2cd3-8cd3-4b78-b4df-72233fb3db57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1517.990172] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 50720565-689e-45e1-a17f-d4673844d6ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1518.001360] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16e44e55-0d5c-407b-8a1f-b1ba0ed61dac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1518.012450] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1518.012634] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1518.012738] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_51dab0b2d3a645f989f127257241fd91': '1', 'io_workload': '10', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '2', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'num_task_spawning': '2', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1518.217701] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dd9796-ed15-4694-a686-357ca145380b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.225490] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de0a5ac-6ba6-4104-bf23-454d0fda9b8e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.257565] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792ff45f-6ab0-466f-8478-9a16ad8f85ba {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.265550] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16337dea-a31c-4b86-b5fe-95b6024a5c6e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.280143] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.288617] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1518.304155] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1518.304359] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.457s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.816903] env[62730]: WARNING oslo_vmware.rw_handles [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1519.816903] env[62730]: ERROR oslo_vmware.rw_handles [ 1519.816903] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1519.818914] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1519.819189] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b 
tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Copying Virtual Disk [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/f8611f9a-1502-47a8-a3ba-ce119726bdb5/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1519.819479] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81ca3e72-6f4b-4004-9a7b-320cd9f3ded5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.827933] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for the task: (returnval){ [ 1519.827933] env[62730]: value = "task-4837223" [ 1519.827933] env[62730]: _type = "Task" [ 1519.827933] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.837102] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Task: {'id': task-4837223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.304388] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.338306] env[62730]: DEBUG oslo_vmware.exceptions [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1520.338451] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.339013] env[62730]: ERROR nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1520.339013] env[62730]: Faults: ['InvalidArgument'] [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Traceback (most recent call last): [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] yield resources [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self.driver.spawn(context, instance, image_meta, [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self._fetch_image_if_missing(context, vi) [ 1520.339013] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] image_cache(vi, tmp_image_ds_loc) [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] vm_util.copy_virtual_disk( [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] session._wait_for_task(vmdk_copy_task) [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return self.wait_for_task(task_ref) [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return evt.wait() [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] result = hub.switch() [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1520.339360] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return self.greenlet.switch() [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self.f(*self.args, **self.kw) [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] raise exceptions.translate_fault(task_info.error) [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Faults: ['InvalidArgument'] [ 1520.339724] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] [ 1520.339724] env[62730]: INFO nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Terminating instance [ 1520.340954] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.341188] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.341438] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1f3fb65f-d110-4f6c-bc01-8d0bccfabba3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.343744] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1520.343941] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1520.344677] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1171db2b-9ee7-49c5-b895-143b806c09c9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.351769] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1520.352037] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-371cf21a-ddad-4abd-9c2f-6e5e9cf64c67 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.356468] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.356660] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1520.357653] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c34a3f6-00f3-419a-8ff3-ce50ed5cbc3d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.362785] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1520.362785] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5245f573-3a78-5f03-6f09-1c319600d890" [ 1520.362785] env[62730]: _type = "Task" [ 1520.362785] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.371009] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5245f573-3a78-5f03-6f09-1c319600d890, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.426676] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1520.427013] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1520.427255] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Deleting the datastore file [datastore2] c2ac09ea-97ae-4e73-9ecb-010241e231f9 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1520.427670] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad7275e9-4cbf-4071-aa69-68f5667a06ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.435058] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for the task: (returnval){ [ 1520.435058] env[62730]: value = "task-4837225" [ 1520.435058] env[62730]: _type = "Task" [ 1520.435058] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.444508] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Task: {'id': task-4837225, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.737558] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.737729] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1520.737856] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1520.758058] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.758306] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.758455] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.758627] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.758764] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.758888] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.759018] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.759156] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.759339] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.759491] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1520.759669] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1520.760167] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.873030] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1520.873397] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating directory with path [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.873540] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a098727-3282-4241-8d5a-0c22a984f648 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.886080] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created directory with path [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.886306] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Fetch image to [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1520.886480] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 
to [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1520.887268] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03faceed-03b8-4953-b4c7-24025a3db381 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.895008] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21069e44-f3de-41dd-a8c2-84e76444bc64 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.905061] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33b02f5-ee04-4ae8-a66f-552d6946d306 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.940120] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c893715f-4d65-418a-8f9e-79b9698da457 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.948166] env[62730]: DEBUG oslo_vmware.api [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Task: {'id': task-4837225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064337} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.949681] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1520.949883] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1520.950085] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1520.950272] env[62730]: INFO nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Took 0.61 seconds to destroy the instance on the hypervisor. 
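The "Fault InvalidArgument not matched." record above, together with the translate_fault() frames in both tracebacks, reflects oslo.vmware mapping a VIM fault name to a registered exception class and falling back to the generic VimFaultException when the name is not registered. A rough sketch under those assumptions (all class names here are stand-ins, not the real oslo_vmware.exceptions hierarchy):

    class VimFaultSketch(Exception):
        """Stand-in for the generic oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FileNotFoundSketch(VimFaultSketch):
        """Example of a specifically registered fault class."""

    _FAULT_MAP = {'FileNotFound': FileNotFoundSketch}

    def translate_fault(fault_name, message):
        cls = _FAULT_MAP.get(fault_name)
        if cls is None:
            # "Fault InvalidArgument not matched.": an unregistered fault
            # name falls back to the generic class, keeping the fault list
            # so callers still see Faults: ['InvalidArgument'].
            return VimFaultSketch([fault_name], message)
        return cls([fault_name], message)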
[ 1520.952121] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-db5da516-c372-49c7-9aa2-3521070cdc1b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.953988] env[62730]: DEBUG nova.compute.claims [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1520.954183] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.954398] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.975625] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1521.032667] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1521.091449] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1521.091652] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1521.273187] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b727edb-e4ee-45e4-9ae6-0f6d7eb6a391 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.281504] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d453c2-c748-4c62-a882-3b315f2d53d1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.313718] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd0362b-0c7a-489f-8179-630e07ff18a3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.321265] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c927829-0dd1-4c52-8a3d-d375ff893d1f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.334478] env[62730]: DEBUG nova.compute.provider_tree [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.342787] env[62730]: DEBUG nova.scheduler.client.report [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.356342] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.402s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.356887] env[62730]: ERROR nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1521.356887] env[62730]: Faults: ['InvalidArgument'] [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Traceback (most recent call last): [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1521.356887] 
env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self.driver.spawn(context, instance, image_meta, [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self._fetch_image_if_missing(context, vi) [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] image_cache(vi, tmp_image_ds_loc) [ 1521.356887] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] vm_util.copy_virtual_disk( [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] session._wait_for_task(vmdk_copy_task) [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return self.wait_for_task(task_ref) [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return evt.wait() [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] result = hub.switch() [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] return self.greenlet.switch() [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1521.357244] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] self.f(*self.args, **self.kw) [ 1521.357622] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1521.357622] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] raise exceptions.translate_fault(task_info.error) [ 1521.357622] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1521.357622] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Faults: ['InvalidArgument'] [ 1521.357622] env[62730]: ERROR nova.compute.manager [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] [ 1521.357752] env[62730]: DEBUG nova.compute.utils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1521.359232] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Build of instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 was re-scheduled: A specified parameter was not correct: fileType [ 1521.359232] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1521.359697] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1521.359885] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1521.360079] env[62730]: DEBUG nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1521.360252] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1521.666685] env[62730]: DEBUG nova.network.neutron [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.680729] env[62730]: INFO nova.compute.manager [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Took 0.32 seconds to deallocate network for instance. [ 1521.799406] env[62730]: INFO nova.scheduler.client.report [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Deleted allocations for instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 [ 1521.830226] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d2d17bde-5e7f-4b36-86a8-07723d47c33b tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.204s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.831486] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 427.779s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.831682] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] During sync_power_state the instance has a pending task (spawning). Skip. 
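The lock bookkeeping that brackets the entries above and below — "Acquiring lock ...", "Lock ... acquired ... waited N.NNNs", "Lock ... released ... held N.NNNs" from lockutils.py:402/407/421, plus the "Acquiring/Acquired/Releasing lock" lines from lockutils.py:310/313/331 — comes from oslo.concurrency. A minimal sketch of both patterns, using only the public oslo_concurrency API (the function body and its use of the "compute_resources" name are illustrative, not Nova's actual code):

    from oslo_concurrency import lockutils

    # Decorator form: produces the "acquired ... waited" / "released ...
    # held" DEBUG triple (the `inner` wrapper at lockutils.py:402/407/421
    # in the entries above).
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Everything here runs under the shared semaphore; the wait and
        # hold durations are measured and logged by the wrapper itself.
        pass

    # Context-manager form: produces the "Acquiring/Acquired/Releasing
    # lock" lines (lockutils.py:310/313/331 elsewhere in this log).
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass

The 576.204s hold and 427.779s wait visible above are exactly these timers: the build held the per-instance lock for the whole (re-scheduled) build attempt, and the power-state sync queued behind it for that long.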
[ 1521.831868] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.832387] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.433s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.832608] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Acquiring lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.832816] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.832996] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.835119] env[62730]: INFO nova.compute.manager [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Terminating instance [ 1521.836857] env[62730]: DEBUG nova.compute.manager [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1521.837083] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1521.837350] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e926314b-6a40-4d62-935f-118d442421f1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.845041] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1521.852337] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2be3526-65e1-40ec-a9f8-7a8f96199857 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.884309] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2ac09ea-97ae-4e73-9ecb-010241e231f9 could not be found. [ 1521.884580] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1521.884709] env[62730]: INFO nova.compute.manager [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1521.884968] env[62730]: DEBUG oslo.service.loopingcall [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.887248] env[62730]: DEBUG nova.compute.manager [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1521.887355] env[62730]: DEBUG nova.network.neutron [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1521.904609] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.905052] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.906649] env[62730]: INFO nova.compute.claims [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1521.925035] env[62730]: DEBUG nova.network.neutron [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.946868] env[62730]: INFO nova.compute.manager [-] [instance: c2ac09ea-97ae-4e73-9ecb-010241e231f9] Took 0.06 seconds to deallocate network for instance. 
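The "Inventory has not changed for provider 5ad8d442-..." entries around this claim log the placement inventory as a plain dict. The arithmetic behind "Claim successful" follows from it: per resource class, allocatable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A sketch using the values copied verbatim from those entries (the loop is illustrative, not the report client's code):

    # Inventory as logged for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'max_unit': 96,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's capacity rule: (total - reserved) * allocation_ratio;
        # a single instance may consume at most max_unit of the class.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} allocatable, max {inv['max_unit']} per instance")

So the 48 physical VCPUs advertise 192 schedulable units at allocation_ratio 4.0 (and 196078 MB of RAM after the 512 MB reservation), which is why the m1.nano claim above succeeds immediately.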
[ 1522.041423] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5a22eb42-0213-4a1d-a8df-7778a865c518 tempest-ImagesOneServerTestJSON-1657823441 tempest-ImagesOneServerTestJSON-1657823441-project-member] Lock "c2ac09ea-97ae-4e73-9ecb-010241e231f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.174078] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e205a0-f71a-41d7-a3a8-4b344f017924 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.182397] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e89e900-9208-42ee-a0c2-26f532d73566 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.212903] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aae980e-b371-4d33-8970-7a068fe0d004 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.222404] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae082e80-03b9-48d2-aded-3bd3cfc17cd8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.238131] env[62730]: DEBUG nova.compute.provider_tree [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.248113] env[62730]: DEBUG nova.scheduler.client.report [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1522.264791] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.265380] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1522.300583] env[62730]: DEBUG nova.compute.utils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.301933] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1522.302132] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1522.312929] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1522.379872] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1522.406517] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.406756] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.406916] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.407204] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 
tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.407361] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.407513] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.407722] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.407884] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.408149] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.408350] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.408530] env[62730]: DEBUG nova.virt.hardware [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.409423] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fc50f1-ccf6-41f4-859d-121b8ec6ff0e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.418301] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1fc316-13aa-45ca-9d0d-895878f29e16 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.632514] env[62730]: DEBUG nova.policy [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e47ee02b564e809516edbb7c267817', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47edc70d81cc4ea68d8da7bec4c625d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1523.153172] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Successfully created port: bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1523.957747] env[62730]: DEBUG nova.compute.manager [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Received event network-vif-plugged-bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1523.957983] env[62730]: DEBUG oslo_concurrency.lockutils [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] Acquiring lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.958244] env[62730]: DEBUG oslo_concurrency.lockutils [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.958423] env[62730]: DEBUG oslo_concurrency.lockutils [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.958600] env[62730]: DEBUG nova.compute.manager [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] No waiting events found dispatching network-vif-plugged-bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1523.958769] env[62730]: WARNING nova.compute.manager [req-a65d6e71-fd11-44d5-9574-66952d0a2aed req-718c3e1a-738e-41c5-ba1e-d87d1db29bc4 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Received unexpected event network-vif-plugged-bd9ac448-225e-4e78-bebc-1177b7590af7 for instance with vm_state building and task_state spawning. 
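The network-vif-plugged sequence above — pop_instance_event, "No waiting events found dispatching ...", then "Received unexpected event ..." — is an event-latch handshake: Neutron notifies Nova out of band, and the notification either wakes a registered waiter or is dropped with a warning. A generic sketch of that pattern under the assumption of a plain threading.Event registry (class and method names here are illustrative, not Nova's internals):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            # Called by the spawn path *before* the operation that will
            # trigger the external event, so the latch exists in advance.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # Called when the external event arrives from Neutron.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # No one was waiting: the "Received unexpected event"
                # warning seen above; the notification is discarded.
                print(f"WARNING: unexpected {event_name} for {instance_uuid}")
            else:
                ev.set()   # wake the waiting build thread

In this trace the plug event arrived while no waiter was registered, so the pop found nothing; the warning is benign here, since the port update and CreateVM_Task that follow show the build proceeding normally.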
[ 1524.059938] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Successfully updated port: bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1524.071274] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.071428] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.071591] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1524.117207] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1524.435963] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Updating instance_info_cache with network_info: [{"id": "bd9ac448-225e-4e78-bebc-1177b7590af7", "address": "fa:16:3e:4a:6b:50", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9ac448-22", "ovs_interfaceid": "bd9ac448-225e-4e78-bebc-1177b7590af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.451234] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing 
lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.451598] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance network_info: |[{"id": "bd9ac448-225e-4e78-bebc-1177b7590af7", "address": "fa:16:3e:4a:6b:50", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9ac448-22", "ovs_interfaceid": "bd9ac448-225e-4e78-bebc-1177b7590af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1524.451981] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:6b:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd9ac448-225e-4e78-bebc-1177b7590af7', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1524.459672] env[62730]: DEBUG oslo.service.loopingcall [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.460218] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1524.460451] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e970623-19c8-4f45-ae70-b654274b13bc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.482911] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1524.482911] env[62730]: value = "task-4837226" [ 1524.482911] env[62730]: _type = "Task" [ 1524.482911] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.491083] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837226, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.737307] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1524.737614] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1524.992994] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837226, 'name': CreateVM_Task, 'duration_secs': 0.31105} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.993183] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1524.993768] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.993934] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.994270] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1524.994520] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da03ae13-9838-44a3-963a-4758acb2605b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.999635] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1524.999635] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52bf46eb-f2b1-28ba-2e5f-8dd865b679ae" [ 1524.999635] env[62730]: _type = "Task" [ 1524.999635] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.009783] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52bf46eb-f2b1-28ba-2e5f-8dd865b679ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.510683] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.511016] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1525.511253] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.996620] env[62730]: DEBUG nova.compute.manager [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Received event network-changed-bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1525.996847] env[62730]: DEBUG nova.compute.manager [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Refreshing instance network info cache due to event network-changed-bd9ac448-225e-4e78-bebc-1177b7590af7. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1525.997112] env[62730]: DEBUG oslo_concurrency.lockutils [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] Acquiring lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.997269] env[62730]: DEBUG oslo_concurrency.lockutils [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] Acquired lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.997441] env[62730]: DEBUG nova.network.neutron [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Refreshing network info cache for port bd9ac448-225e-4e78-bebc-1177b7590af7 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1526.353669] env[62730]: DEBUG nova.network.neutron [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Updated VIF entry in instance network info cache for port bd9ac448-225e-4e78-bebc-1177b7590af7. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1526.354054] env[62730]: DEBUG nova.network.neutron [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Updating instance_info_cache with network_info: [{"id": "bd9ac448-225e-4e78-bebc-1177b7590af7", "address": "fa:16:3e:4a:6b:50", "network": {"id": "648e3de4-36c9-4c09-8725-85988b2e227f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1080900941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47edc70d81cc4ea68d8da7bec4c625d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9ac448-22", "ovs_interfaceid": "bd9ac448-225e-4e78-bebc-1177b7590af7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.364256] env[62730]: DEBUG oslo_concurrency.lockutils [req-0ded8e1a-4c3e-42e3-85aa-9ef120212aa4 req-e01198a3-0db6-4dc2-bbc7-2f3f93afe457 service nova] Releasing lock "refresh_cache-a5a39785-b18a-4d18-a0af-8b4065c354f2" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.788048] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] 
Acquiring lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.941661] env[62730]: WARNING oslo_vmware.rw_handles [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.941661] env[62730]: ERROR oslo_vmware.rw_handles [ 1566.942892] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1566.944141] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1566.944399] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Copying Virtual Disk [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/ba15e1f4-b5b3-4b1d-a49d-39889adc5e5d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1566.944685] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c74edb54-2700-46c2-b0b4-91c230ee78cb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.953227] 
env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1566.953227] env[62730]: value = "task-4837227" [ 1566.953227] env[62730]: _type = "Task" [ 1566.953227] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.961565] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.464068] env[62730]: DEBUG oslo_vmware.exceptions [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1567.464310] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.464857] env[62730]: ERROR nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.464857] env[62730]: Faults: ['InvalidArgument'] [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Traceback (most recent call last): [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] yield resources [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self.driver.spawn(context, instance, image_meta, [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1567.464857] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self._fetch_image_if_missing(context, vi) [ 1567.464857] env[62730]: 
ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] image_cache(vi, tmp_image_ds_loc) [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] vm_util.copy_virtual_disk( [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] session._wait_for_task(vmdk_copy_task) [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return self.wait_for_task(task_ref) [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return evt.wait() [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] result = hub.switch() [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1567.465277] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return self.greenlet.switch() [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self.f(*self.args, **self.kw) [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] raise exceptions.translate_fault(task_info.error) [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Faults: ['InvalidArgument'] [ 1567.465679] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] [ 1567.465679] env[62730]: INFO nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 
tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Terminating instance [ 1567.466771] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.466986] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.467244] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6aae657-bc5f-4d21-92aa-64d6a0a5913b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.469489] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1567.469750] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1567.470577] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c4a4af-94cc-46d5-96e0-cbaa5c7f121c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.478543] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1567.478787] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85566ad9-b422-446b-bf9d-e0b258cbbd2d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.481218] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.481385] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1567.482534] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-798b12b7-b752-4432-a8ef-1ddf67136b6f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1567.487468] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for the task: (returnval){
[ 1567.487468] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52622258-efb1-db61-663a-6a2384b86292"
[ 1567.487468] env[62730]: _type = "Task"
[ 1567.487468] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1567.496606] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52622258-efb1-db61-663a-6a2384b86292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1567.549756] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1567.549994] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1567.550256] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleting the datastore file [datastore2] 3a61955c-d6df-4024-bc41-b1100a89fd7f {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1567.550672] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-000a84a8-9aa5-41aa-bed3-eb681a14310f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1567.558426] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){
[ 1567.558426] env[62730]: value = "task-4837229"
[ 1567.558426] env[62730]: _type = "Task"
[ 1567.558426] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1567.566718] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837229, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.997536] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1567.997918] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Creating directory with path [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.998066] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0f34db4-e887-440f-9085-0565656437fc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.010502] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Created directory with path [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1568.010722] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Fetch image to [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1568.010905] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1568.011717] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71135a37-5ad9-482b-9e2a-50ba38266a23 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.019124] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a1cbce-ba41-4c87-8890-b463a3b22c22 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.028923] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe518e3-2809-45f3-993b-89b00b21c397 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.062443] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80b4b35-460c-42d3-8902-d66d10ca0454 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.071420] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b5bdb6fc-650d-41e2-b823-384e8a02ec8e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.073130] env[62730]: DEBUG oslo_vmware.api [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076693} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.073377] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1568.073566] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1568.073746] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1568.073924] env[62730]: INFO nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Took 0.60 seconds to destroy the instance on the hypervisor. 
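Note on the task waits above: the SearchDatastore_Task and DeleteDatastoreFile_Task records, like the failed CopyVirtualDisk task in the earlier traceback, all run through the same oslo.vmware pattern of submitting a vCenter task and polling its TaskInfo until a terminal state, raising a translated exception when the task errors. The sketch below is a simplified, self-contained rendering of that polling loop, not the actual oslo.vmware code (which drives the poll from an eventlet-based looping call, as the traceback shows); TaskInfo, VimFault, fetch_task_info and the state names are stand-ins.

    import time

    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    class TaskInfo:
        """Hypothetical snapshot of a vCenter task: state is one of
        'queued', 'running', 'success', 'error'."""
        def __init__(self, state, progress=0, error=None):
            self.state = state
            self.progress = progress
            self.error = error

    def wait_for_task(fetch_task_info, interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        fetch_task_info is any callable returning the current TaskInfo
        (the real driver reads it via the PropertyCollector).
        """
        while True:
            info = fetch_task_info()
            if info.state in ('queued', 'running'):
                # These polls are what the "... progress is 0%." records show.
                time.sleep(interval)
                continue
            if info.state == 'success':
                return info
            # Terminal error: raise it, which is how "A specified parameter
            # was not correct: fileType" surfaced in nova.compute.manager.
            raise VimFault(info.error)

Because completion is delivered either as a return value or an exception, every caller up the stack (vm_util.copy_virtual_disk, ds_util.file_delete) gets uniform error handling from one polling loop.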
[ 1568.076142] env[62730]: DEBUG nova.compute.claims [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1568.076367] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.076705] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.096273] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1568.154462] env[62730]: DEBUG oslo_vmware.rw_handles [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1568.214705] env[62730]: DEBUG oslo_vmware.rw_handles [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1568.214922] env[62730]: DEBUG oslo_vmware.rw_handles [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1568.386824] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7737a9c-7936-4657-9835-e02b3cb8cf99 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1568.395193] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a118af-4e28-4e17-b6df-6db5bcd89c31 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1568.426330] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2561b70a-c553-44d0-b436-3f2028de0412 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1568.434225] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d4ec15-2f47-48b1-81e4-134af757d8bc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1568.449085] env[62730]: DEBUG nova.compute.provider_tree [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1568.457959] env[62730]: DEBUG nova.scheduler.client.report [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1568.472148] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.395s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1568.472693] env[62730]: ERROR nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1568.472693] env[62730]: Faults: ['InvalidArgument']
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Traceback (most recent call last):
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self.driver.spawn(context, instance, image_meta,
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self._fetch_image_if_missing(context, vi)
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] image_cache(vi, tmp_image_ds_loc)
[ 1568.472693] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] vm_util.copy_virtual_disk(
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] session._wait_for_task(vmdk_copy_task)
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return self.wait_for_task(task_ref)
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return evt.wait()
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] result = hub.switch()
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] return self.greenlet.switch()
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1568.473094] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] self.f(*self.args, **self.kw)
[ 1568.473464] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1568.473464] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] raise exceptions.translate_fault(task_info.error)
[ 1568.473464] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1568.473464] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Faults: ['InvalidArgument']
[ 1568.473464] env[62730]: ERROR nova.compute.manager [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f]
[ 1568.473613] env[62730]: DEBUG nova.compute.utils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1568.474911] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Build of instance 3a61955c-d6df-4024-bc41-b1100a89fd7f was re-scheduled: A specified parameter was not correct: fileType
[ 1568.474911] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1568.475306] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1568.475485] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1568.475660] env[62730]: DEBUG nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1568.475840] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1568.893914] env[62730]: DEBUG nova.network.neutron [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.908246] env[62730]: INFO nova.compute.manager [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Took 0.43 seconds to deallocate network for instance. [ 1569.016067] env[62730]: INFO nova.scheduler.client.report [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleted allocations for instance 3a61955c-d6df-4024-bc41-b1100a89fd7f [ 1569.041870] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d5ad4616-1ac2-4857-a39d-69a6ff67502a tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 599.327s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.043205] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 402.831s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.043449] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.043825] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.044051] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.046178] env[62730]: INFO nova.compute.manager [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Terminating instance [ 1569.047937] env[62730]: DEBUG nova.compute.manager [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1569.048145] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1569.048647] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-014c4e5d-8e4e-4d45-868e-0c0399dacba3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.058208] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c5f752-0c8a-4378-b4ab-db42b864ba79 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.070047] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1569.094408] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a61955c-d6df-4024-bc41-b1100a89fd7f could not be found. 
[ 1569.094667] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1569.094864] env[62730]: INFO nova.compute.manager [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1569.095136] env[62730]: DEBUG oslo.service.loopingcall [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.095398] env[62730]: DEBUG nova.compute.manager [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1569.095491] env[62730]: DEBUG nova.network.neutron [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1569.124344] env[62730]: DEBUG nova.network.neutron [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.126162] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.126392] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.127978] env[62730]: INFO nova.compute.claims [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.133309] env[62730]: INFO nova.compute.manager [-] [instance: 3a61955c-d6df-4024-bc41-b1100a89fd7f] Took 0.04 seconds to deallocate network for instance. 
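Note on the lock records threaded through this section ('acquired ... waited 0.000s', '"released" ... held 0.185s'): they come from the oslo.concurrency lockutils wrapper, which measures how long a caller waited to acquire a named lock and how long it held it; the "compute_resources" lock is what serializes the resource tracker's claim and abort paths seen here. Below is a minimal sketch of that bookkeeping, assuming a process-local registry and using print as a stand-in for the DEBUG logger; real lockutils also supports the external, file-based locks used for the image cache entries.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, owner):
        """Hypothetical re-creation of the waited/held bookkeeping in the log."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - acquired:.3f}s')

    # Usage mirroring the claim records above:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...  # claim or abort resources

Logging both durations makes lock contention and long critical sections visible directly in the service log, which is why the waited/held figures are worth watching in traces like this one.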
[ 1569.228604] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e2fffd31-0fa4-4451-9fff-fb43de61edc6 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "3a61955c-d6df-4024-bc41-b1100a89fd7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.390663] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b263a56-3f40-4157-b7e6-fd4b7cd3ebd5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.398684] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7c9073-fa03-4e57-836a-684429216add {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.429196] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee8f551-4a97-4eaf-8602-a24ed7621954 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.437321] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efcf744a-1b45-424c-83a6-30e4d9110b21 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.452272] env[62730]: DEBUG nova.compute.provider_tree [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.461935] env[62730]: DEBUG nova.scheduler.client.report [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.475633] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.476200] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1569.514538] env[62730]: DEBUG nova.compute.utils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.516015] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1569.516348] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1569.528638] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1569.603756] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1569.607637] env[62730]: DEBUG nova.policy [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d5721807d854d39bb64f02653e31e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70e4ccdd17d64e0da492ff6c4b0f79d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1569.630035] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.630338] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.630531] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.630698] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.630850] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.631009] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.632406] env[62730]: 
DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.632584] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.632766] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.632934] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.633168] env[62730]: DEBUG nova.virt.hardware [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.634084] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00745e89-2e29-4987-988e-8bf33bdca0b0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.642973] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b841cac3-09a3-4ca4-a13e-bd500df4b554 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.147308] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Successfully created port: 5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1571.308318] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Successfully updated port: 5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1571.322588] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.322814] env[62730]: DEBUG oslo_concurrency.lockutils [None 
req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquired lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.322890] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1571.388385] env[62730]: DEBUG nova.compute.manager [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Received event network-vif-plugged-5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1571.388609] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] Acquiring lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.388819] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.388992] env[62730]: DEBUG oslo_concurrency.lockutils [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.389753] env[62730]: DEBUG nova.compute.manager [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] No waiting events found dispatching network-vif-plugged-5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1571.390077] env[62730]: WARNING nova.compute.manager [req-7e61f0d2-5f7a-4d77-a472-2356cb277c01 req-c5c84a91-6111-48b4-8025-f284d6750a27 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Received unexpected event network-vif-plugged-5a6192a1-e3a2-4630-9692-69988a82e48d for instance with vm_state building and task_state spawning. [ 1571.408581] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1571.664180] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Updating instance_info_cache with network_info: [{"id": "5a6192a1-e3a2-4630-9692-69988a82e48d", "address": "fa:16:3e:7b:c7:a9", "network": {"id": "6d9ad06a-24eb-4ce6-b3c6-db26e32b0085", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-462341590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70e4ccdd17d64e0da492ff6c4b0f79d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a6192a1-e3", "ovs_interfaceid": "5a6192a1-e3a2-4630-9692-69988a82e48d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.675926] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Releasing lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.676314] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance network_info: |[{"id": "5a6192a1-e3a2-4630-9692-69988a82e48d", "address": "fa:16:3e:7b:c7:a9", "network": {"id": "6d9ad06a-24eb-4ce6-b3c6-db26e32b0085", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-462341590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70e4ccdd17d64e0da492ff6c4b0f79d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a6192a1-e3", "ovs_interfaceid": "5a6192a1-e3a2-4630-9692-69988a82e48d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1571.676793] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:c7:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2c68e7-b690-42e2-9491-c3f9357cc66a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a6192a1-e3a2-4630-9692-69988a82e48d', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.684595] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Creating folder: Project (70e4ccdd17d64e0da492ff6c4b0f79d1). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.685650] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8b2c4ae-4bbf-4afd-aa62-03e1bcdb00f5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.697528] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Created folder: Project (70e4ccdd17d64e0da492ff6c4b0f79d1) in parent group-v942928. [ 1571.697771] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Creating folder: Instances. Parent ref: group-v943018. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.697970] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dddf3050-c012-4d3b-81a3-4cb36e301b8f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.707262] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Created folder: Instances in parent group-v943018. [ 1571.707587] env[62730]: DEBUG oslo.service.loopingcall [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1571.707873] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1571.708187] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-115c3374-508e-4109-866f-0c8c128d1430 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1571.729322] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1571.729322] env[62730]: value = "task-4837232"
[ 1571.729322] env[62730]: _type = "Task"
[ 1571.729322] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1571.740103] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837232, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1572.241478] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837232, 'name': CreateVM_Task, 'duration_secs': 0.300002} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1572.241680] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1572.242307] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1572.242470] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1572.242801] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1572.243102] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f645a06-3983-498a-8ea9-31de24d05173 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1572.248306] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for the task: (returnval){
[ 1572.248306] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]522fc274-bf1b-c4bd-85ab-7e7edeb894ba"
[ 1572.248306] env[62730]: _type = "Task"
[ 1572.248306] env[62730]: } to complete.
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.257634] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]522fc274-bf1b-c4bd-85ab-7e7edeb894ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.759852] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.760218] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.760347] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.417950] env[62730]: DEBUG nova.compute.manager [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Received event network-changed-5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1573.418104] env[62730]: DEBUG nova.compute.manager [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Refreshing instance network info cache due to event network-changed-5a6192a1-e3a2-4630-9692-69988a82e48d. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1573.419100] env[62730]: DEBUG oslo_concurrency.lockutils [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] Acquiring lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.419100] env[62730]: DEBUG oslo_concurrency.lockutils [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] Acquired lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.419100] env[62730]: DEBUG nova.network.neutron [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Refreshing network info cache for port 5a6192a1-e3a2-4630-9692-69988a82e48d {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1573.738351] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.947248] env[62730]: DEBUG nova.network.neutron [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Updated VIF entry in instance network info cache for port 5a6192a1-e3a2-4630-9692-69988a82e48d. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1573.947797] env[62730]: DEBUG nova.network.neutron [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Updating instance_info_cache with network_info: [{"id": "5a6192a1-e3a2-4630-9692-69988a82e48d", "address": "fa:16:3e:7b:c7:a9", "network": {"id": "6d9ad06a-24eb-4ce6-b3c6-db26e32b0085", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-462341590-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70e4ccdd17d64e0da492ff6c4b0f79d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a6192a1-e3", "ovs_interfaceid": "5a6192a1-e3a2-4630-9692-69988a82e48d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.959033] env[62730]: DEBUG oslo_concurrency.lockutils [req-a970ceb3-24a8-4bdc-a5bc-ed848608b763 req-81e47f17-3e75-4473-b164-b022f90aea61 service nova] Releasing lock "refresh_cache-8d18fd69-cdaf-470c-b942-cd00c66f45ea" {{(pid=62730) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.737628] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.738215] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.959020] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.733192] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.736877] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.749291] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.749562] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.750293] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.750534] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1578.752166] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b668e2d-4328-4b39-89ef-13ce115f83b6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.761717] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1af23-a39b-4be8-84c0-22289c3cdf30 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.777165] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8f2bf2-14b2-43c9-a10f-730cee04136f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.784575] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29384cf4-5e45-4e0a-a8a4-6c249650ca1f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.816822] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180345MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1578.816988] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.817219] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.931998] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 435af367-8af8-4e07-b96a-923d32cc645e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932192] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932326] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932455] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932579] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932701] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932822] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.932939] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.933073] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.933205] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1578.946234] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 2a15c7f4-16ec-4238-ac95-8de298292584 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1578.957206] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8b5e2cd3-8cd3-4b78-b4df-72233fb3db57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1578.969559] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 50720565-689e-45e1-a17f-d4673844d6ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1578.981156] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 16e44e55-0d5c-407b-8a1f-b1ba0ed61dac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1578.992408] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1578.992638] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1578.994601] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_9992614978224ad7bd8ed947a0cf69bc': '1', 'io_workload': '10', 'num_task_spawning': '2', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1579.194223] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033ef99a-a641-42b2-90b5-d057cf9a6520 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.202911] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3f5c1c-e20b-4043-a2fe-97f1b902e5bd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.234256] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f0b1a3-af58-41ca-9722-0159054a97b5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.242132] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c817295d-e92e-4a8c-880a-4a1149ad2fd6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.256014] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.264720] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1579.278984] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1579.279214] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.462s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.275376] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.739085] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1581.739963] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1582.739442] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1582.739442] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1582.739442] 
env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1582.749658] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "adc5639c-773e-4deb-9387-004833e94507" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.750098] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.766721] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.766906] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.767255] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.767437] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.767598] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.767760] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.767926] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.768140] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.768315] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.768490] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1582.768647] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1584.753747] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.737830] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.737830] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1598.881802] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "4eeba36c-efe6-4050-953f-75669079a0e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.882356] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.957318] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.957641] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.961150] env[62730]: WARNING oslo_vmware.rw_handles [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1616.961150] env[62730]: ERROR oslo_vmware.rw_handles [ 1616.962318] env[62730]: DEBUG nova.virt.vmwareapi.images [None 
req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1616.963565] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1616.963810] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Copying Virtual Disk [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/699db450-d7dd-4e36-8bb1-90be21213609/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1616.964108] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59f7cf27-061f-496c-aebc-dd64ac869232 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.971712] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for the task: (returnval){ [ 1616.971712] env[62730]: value = "task-4837233" [ 1616.971712] env[62730]: _type = "Task" [ 1616.971712] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.981122] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Task: {'id': task-4837233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.482483] env[62730]: DEBUG oslo_vmware.exceptions [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1617.482799] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.483365] env[62730]: ERROR nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1617.483365] env[62730]: Faults: ['InvalidArgument'] [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Traceback (most recent call last): [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] yield resources [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self.driver.spawn(context, instance, image_meta, [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self._fetch_image_if_missing(context, vi) [ 1617.483365] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] image_cache(vi, tmp_image_ds_loc) [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] vm_util.copy_virtual_disk( [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] session._wait_for_task(vmdk_copy_task) [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return self.wait_for_task(task_ref) [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return evt.wait() [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] result = hub.switch() [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1617.483702] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return self.greenlet.switch() [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self.f(*self.args, **self.kw) [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] raise exceptions.translate_fault(task_info.error) [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Faults: ['InvalidArgument'] [ 1617.484136] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] [ 1617.484136] env[62730]: INFO nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Terminating instance [ 1617.485773] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.485995] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1617.486638] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 
tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1617.486833] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1617.487083] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94a66c35-21d1-4cdc-b064-97d7dd684197 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.489404] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a684ab8a-9aed-4899-b187-a00f136a6359 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.496414] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1617.496620] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b1cfdc4-e435-410f-b297-e63bc8fce228 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.498927] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1617.499115] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1617.500171] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a15dd2a-0601-4fb5-8065-38283e3308d5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.506302] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for the task: (returnval){ [ 1617.506302] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d18d1f-27ee-3a40-d7c1-839ede1160f2" [ 1617.506302] env[62730]: _type = "Task" [ 1617.506302] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.518750] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52d18d1f-27ee-3a40-d7c1-839ede1160f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.566821] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1617.567064] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1617.567321] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Deleting the datastore file [datastore2] 435af367-8af8-4e07-b96a-923d32cc645e {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1617.567697] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f860c60d-2ef9-437e-99c7-290be867c77b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.574947] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for the task: (returnval){ [ 1617.574947] env[62730]: value = "task-4837235" [ 1617.574947] env[62730]: _type = "Task" [ 1617.574947] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.583200] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Task: {'id': task-4837235, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.017134] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1618.017525] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Creating directory with path [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1618.017678] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2fa4bcd-6c0b-4075-8c30-771c4d970189 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.029629] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Created directory with path [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1618.029833] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Fetch image to [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1618.030010] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1618.030879] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6376bc38-fd43-4d51-adba-960a3eccd0ce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.038076] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ac7b71-4720-44b9-885f-24dc7544dd93 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.047236] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ed49a1-afb4-4b97-b761-f1193e7d72f4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.081503] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b095aae7-327d-4af2-91cf-562ad088f6d6 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.088728] env[62730]: DEBUG oslo_vmware.api [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Task: {'id': task-4837235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072068} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.090200] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1618.090397] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1618.090601] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1618.090805] env[62730]: INFO nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Took 0.60 seconds to destroy the instance on the hypervisor. 
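Editorial note: every vCenter operation in this trace follows the same lifecycle — a *_Task method is invoked (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task), wait_for_task polls it via _poll_task until it reaches a terminal state (the recurring "progress is 0%" and "completed successfully ... duration_secs" entries), and an error state is translated into an exception such as the "VimFaultException: A specified parameter was not correct: fileType" raised in the tracebacks above and below. The following is a minimal, self-contained Python sketch of that poll-and-translate loop under stated assumptions: TaskInfo, the poll callable, and translate_fault here are illustrative stand-ins, not the real oslo.vmware API.

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        # Illustrative stand-in for the task info object returned by vCenter.
        state: str                   # "running", "success", or "error"
        progress: int = 0            # percent complete, as logged by _poll_task
        error: Optional[str] = None  # fault message when state == "error"

    class VimFaultException(Exception):
        # Stand-in for the translated fault raised on task failure,
        # e.g. "A specified parameter was not correct: fileType".
        pass

    def translate_fault(error: str) -> Exception:
        # Stand-in for fault translation: map a task fault to a specific
        # exception class, falling back to a generic one (the
        # "Fault InvalidArgument not matched" entry above is that fallback path).
        return VimFaultException(error)

    def wait_for_task(poll: Callable[[], TaskInfo],
                      interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it succeeds; raise on error or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info.state == "success":
                return info  # cf. "CreateVM_Task ... completed successfully"
            if info.state == "error":
                raise translate_fault(info.error or "unknown fault")
            # Still running: the log shows periodic "progress is N%" entries here.
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

Under these assumptions, the CopyVirtualDisk_Task failure recorded below would surface exactly at the session._wait_for_task call shown in the tracebacks, which is why the same VimFaultException appears once when the spawn aborts and again when the build is torn down.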
[ 1618.092631] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2ccddc73-07d0-4138-8cd4-d1b1284f7c9f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.094602] env[62730]: DEBUG nova.compute.claims [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1618.094783] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.094998] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.117809] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1618.173948] env[62730]: DEBUG oslo_vmware.rw_handles [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1618.234789] env[62730]: DEBUG oslo_vmware.rw_handles [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1618.234942] env[62730]: DEBUG oslo_vmware.rw_handles [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1618.386104] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3750d480-1843-43b6-8f96-947535842b10 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.394404] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df322eab-29bd-4011-aec3-dd4355b7cd9a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.424060] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f903e37-452a-41b1-a8b5-8c2e99f8a695 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.431828] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e1691e-c38f-4b83-bdc3-f8453e9b1392 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1618.446595] env[62730]: DEBUG nova.compute.provider_tree [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1618.456203] env[62730]: DEBUG nova.scheduler.client.report [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1618.471527] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.376s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1618.472095] env[62730]: ERROR nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1618.472095] env[62730]: Faults: ['InvalidArgument']
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Traceback (most recent call last):
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self.driver.spawn(context, instance, image_meta,
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self._fetch_image_if_missing(context, vi)
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] image_cache(vi, tmp_image_ds_loc)
[ 1618.472095] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] vm_util.copy_virtual_disk(
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] session._wait_for_task(vmdk_copy_task)
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return self.wait_for_task(task_ref)
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return evt.wait()
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] result = hub.switch()
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] return self.greenlet.switch()
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1618.472543] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] self.f(*self.args, **self.kw)
[ 1618.472861] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1618.472861] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] raise exceptions.translate_fault(task_info.error)
[ 1618.472861] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1618.472861] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Faults: ['InvalidArgument']
[ 1618.472861] env[62730]: ERROR nova.compute.manager [instance: 435af367-8af8-4e07-b96a-923d32cc645e]
[ 1618.472861] env[62730]: DEBUG nova.compute.utils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1618.474618] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Build of instance 435af367-8af8-4e07-b96a-923d32cc645e was re-scheduled: A specified parameter was not correct: fileType
[ 1618.474618] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1618.475013] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1618.475226] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1618.475429] env[62730]: DEBUG nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1618.475598] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1618.968556] env[62730]: DEBUG nova.network.neutron [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.981716] env[62730]: INFO nova.compute.manager [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Took 0.51 seconds to deallocate network for instance. [ 1619.090412] env[62730]: INFO nova.scheduler.client.report [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Deleted allocations for instance 435af367-8af8-4e07-b96a-923d32cc645e [ 1619.113276] env[62730]: DEBUG oslo_concurrency.lockutils [None req-12c334ce-5c45-4dc4-9a42-ea0ad2410873 tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.366s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.114528] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.509s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.114799] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Acquiring lock "435af367-8af8-4e07-b96a-923d32cc645e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.115029] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 
tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.115207] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.117237] env[62730]: INFO nova.compute.manager [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Terminating instance [ 1619.119049] env[62730]: DEBUG nova.compute.manager [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1619.119251] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1619.119947] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48de9468-9f54-4c0f-8d69-3a49878eaa24 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.124476] env[62730]: DEBUG nova.compute.manager [None req-ea7314b7-4708-454e-81c3-16f016f1ef10 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 2a15c7f4-16ec-4238-ac95-8de298292584] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1619.131253] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1dc925-b424-4b92-a8f9-cf323e38fa66 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.149788] env[62730]: DEBUG nova.compute.manager [None req-ea7314b7-4708-454e-81c3-16f016f1ef10 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 2a15c7f4-16ec-4238-ac95-8de298292584] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1619.162152] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 435af367-8af8-4e07-b96a-923d32cc645e could not be found. 
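The Acquiring/acquired/released breadcrumbs around the terminate path above (waited 432.509s to enter do_terminate_instance, held 0.000s on the events lock, and so on) all come from the lockutils wrapper named in their {{... inner .../oslo_concurrency/lockutils.py}} trailers. A minimal sketch of that decorator pattern, assuming a process-local lock table and simplified names rather than oslo.concurrency's actual internals (which also support external file locks and fair semaphores):

import functools
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_LOCKS: dict[str, threading.Lock] = {}  # process-local lock table (assumption)
_TABLE_GUARD = threading.Lock()


def synchronized(name):
    """Decorator emitting acquire/wait/held records like the ones above."""
    def wrap(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            with _TABLE_GUARD:
                lock = _LOCKS.setdefault(name, threading.Lock())
            caller = fn.__qualname__
            LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
            start = time.monotonic()
            lock.acquire()
            LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                      name, caller, time.monotonic() - start)
            held_from = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                lock.release()
                LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                          name, caller, time.monotonic() - held_from)
        return inner
    return wrap

A function guarded with @synchronized("compute_resources") would then produce the same waited/held timing pairs that bracket the resource-tracker operations throughout this log.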
[ 1619.162351] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1619.162565] env[62730]: INFO nova.compute.manager [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1619.162806] env[62730]: DEBUG oslo.service.loopingcall [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.163041] env[62730]: DEBUG nova.compute.manager [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1619.163144] env[62730]: DEBUG nova.network.neutron [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1619.178453] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ea7314b7-4708-454e-81c3-16f016f1ef10 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "2a15c7f4-16ec-4238-ac95-8de298292584" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.715s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.187699] env[62730]: DEBUG nova.network.neutron [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.189046] env[62730]: DEBUG nova.compute.manager [None req-2e48bcc2-f791-46b4-8d1f-f1d6acdac595 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: 8b5e2cd3-8cd3-4b78-b4df-72233fb3db57] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1619.196743] env[62730]: INFO nova.compute.manager [-] [instance: 435af367-8af8-4e07-b96a-923d32cc645e] Took 0.03 seconds to deallocate network for instance. [ 1619.212966] env[62730]: DEBUG nova.compute.manager [None req-2e48bcc2-f791-46b4-8d1f-f1d6acdac595 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] [instance: 8b5e2cd3-8cd3-4b78-b4df-72233fb3db57] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1619.235666] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2e48bcc2-f791-46b4-8d1f-f1d6acdac595 tempest-AttachVolumeNegativeTest-202054193 tempest-AttachVolumeNegativeTest-202054193-project-member] Lock "8b5e2cd3-8cd3-4b78-b4df-72233fb3db57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.509s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.245834] env[62730]: DEBUG nova.compute.manager [None req-34b5631b-ff30-43f0-9380-8c09b85ee05f tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 50720565-689e-45e1-a17f-d4673844d6ae] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1619.269071] env[62730]: DEBUG nova.compute.manager [None req-34b5631b-ff30-43f0-9380-8c09b85ee05f tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 50720565-689e-45e1-a17f-d4673844d6ae] Instance disappeared before build. {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1619.289774] env[62730]: DEBUG oslo_concurrency.lockutils [None req-f0e474a0-f64b-47ad-a7c3-a01f1b4e241f tempest-InstanceActionsNegativeTestJSON-1067786159 tempest-InstanceActionsNegativeTestJSON-1067786159-project-member] Lock "435af367-8af8-4e07-b96a-923d32cc645e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.295558] env[62730]: DEBUG oslo_concurrency.lockutils [None req-34b5631b-ff30-43f0-9380-8c09b85ee05f tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "50720565-689e-45e1-a17f-d4673844d6ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.330s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.304972] env[62730]: DEBUG nova.compute.manager [None req-3fd328a7-da28-453d-9647-6a7611d25ba6 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 16e44e55-0d5c-407b-8a1f-b1ba0ed61dac] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1619.329788] env[62730]: DEBUG nova.compute.manager [None req-3fd328a7-da28-453d-9647-6a7611d25ba6 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] [instance: 16e44e55-0d5c-407b-8a1f-b1ba0ed61dac] Instance disappeared before build. 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1619.351727] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3fd328a7-da28-453d-9647-6a7611d25ba6 tempest-ServerDiskConfigTestJSON-1240719153 tempest-ServerDiskConfigTestJSON-1240719153-project-member] Lock "16e44e55-0d5c-407b-8a1f-b1ba0ed61dac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.883s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.362778] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1619.414328] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.414590] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.416494] env[62730]: INFO nova.compute.claims [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1619.640825] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3333de34-3dd6-49fc-b055-429352ccac26 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.648934] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf200af4-8e71-43a9-bd60-0f25d1b5c55a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.678897] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd034a89-be15-4d26-8687-5e81865d08bd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.687281] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c156985b-75b2-43ed-ae6d-cdad4e844f6f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.702151] env[62730]: DEBUG nova.compute.provider_tree [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.713772] env[62730]: DEBUG 
nova.scheduler.client.report [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1619.744502] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.745205] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1619.785544] env[62730]: DEBUG nova.compute.utils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1619.786872] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1619.787064] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1619.797057] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1619.869971] env[62730]: DEBUG nova.policy [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1f6d1ba8e8348268c131b68444d86fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6181e6d67e74692b11bddb3c1ed2779', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1619.900720] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1619.928539] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1619.928792] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1619.928951] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1619.929151] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1619.929301] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1619.929448] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 
tempest-ServersTestJSON-1151127865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1619.929701] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1619.929868] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1619.930046] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1619.930214] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1619.930387] env[62730]: DEBUG nova.virt.hardware [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1619.931300] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c0a217-270d-487f-8636-9a99d21f650a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.939408] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f072d1b-0990-4ba0-9495-c71b7ed2432f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.255048] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Successfully created port: 7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.453043] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Successfully updated port: 7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1621.468963] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.468963] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquired lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.468963] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1621.549305] env[62730]: DEBUG nova.compute.manager [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Received event network-vif-plugged-7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1621.549305] env[62730]: DEBUG oslo_concurrency.lockutils [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] Acquiring lock "842e4145-ba83-48d5-8514-78532381eb2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.549305] env[62730]: DEBUG oslo_concurrency.lockutils [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] Lock "842e4145-ba83-48d5-8514-78532381eb2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.549305] env[62730]: DEBUG oslo_concurrency.lockutils [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] Lock "842e4145-ba83-48d5-8514-78532381eb2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.549563] env[62730]: DEBUG nova.compute.manager [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] No waiting events found dispatching network-vif-plugged-7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1621.549563] env[62730]: WARNING nova.compute.manager [req-8dc987cf-1e79-4a2e-bb5a-47228d581f5b req-3e5d2286-1b71-4dfd-8d95-2ad51ef73d5d service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Received unexpected event network-vif-plugged-7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e for instance with vm_state building and task_state spawning. [ 1621.556955] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1621.818952] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Updating instance_info_cache with network_info: [{"id": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "address": "fa:16:3e:e0:ef:74", "network": {"id": "71ccf346-c204-416b-95fc-178308fd169b", "bridge": "br-int", "label": "tempest-ServersTestJSON-164070943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6181e6d67e74692b11bddb3c1ed2779", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e65c8e6-d1", "ovs_interfaceid": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.830268] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Releasing lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.830612] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance network_info: |[{"id": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "address": "fa:16:3e:e0:ef:74", "network": {"id": "71ccf346-c204-416b-95fc-178308fd169b", "bridge": "br-int", "label": "tempest-ServersTestJSON-164070943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6181e6d67e74692b11bddb3c1ed2779", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e65c8e6-d1", "ovs_interfaceid": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1621.831086] env[62730]: 
DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:ef:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd669e36a-5c9d-4fa4-92c8-90e7cb814262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1621.840205] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Creating folder: Project (c6181e6d67e74692b11bddb3c1ed2779). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1621.840981] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5049840-2222-49da-8999-ae7c33e6954b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.852803] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Created folder: Project (c6181e6d67e74692b11bddb3c1ed2779) in parent group-v942928. [ 1621.853028] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Creating folder: Instances. Parent ref: group-v943021. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1621.853281] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2965666a-0ec0-43fd-862a-7ac1ef5c7dde {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.862714] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Created folder: Instances in parent group-v943021. [ 1621.863045] env[62730]: DEBUG oslo.service.loopingcall [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.863249] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1621.863459] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa5d08bb-9e80-46da-b71c-e8da6a229c9e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.883173] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1621.883173] env[62730]: value = "task-4837238" [ 1621.883173] env[62730]: _type = "Task" [ 1621.883173] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.891221] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837238, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.393886] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837238, 'name': CreateVM_Task, 'duration_secs': 0.298952} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.394076] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1622.394752] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.394927] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.395272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1622.395529] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6776e325-6a45-4372-8934-371eb043d597 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.400401] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for the task: (returnval){ [ 1622.400401] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526d8a3a-6814-aa81-2302-de4e7a468d21" [ 1622.400401] env[62730]: _type = "Task" [ 1622.400401] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.409389] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526d8a3a-6814-aa81-2302-de4e7a468d21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.912063] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.912403] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1622.912403] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.573359] env[62730]: DEBUG nova.compute.manager [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Received event network-changed-7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1623.573561] env[62730]: DEBUG nova.compute.manager [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Refreshing instance network info cache due to event network-changed-7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1623.573782] env[62730]: DEBUG oslo_concurrency.lockutils [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] Acquiring lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.574037] env[62730]: DEBUG oslo_concurrency.lockutils [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] Acquired lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.574283] env[62730]: DEBUG nova.network.neutron [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Refreshing network info cache for port 7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1623.910373] env[62730]: DEBUG nova.network.neutron [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Updated VIF entry in instance network info cache for port 7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1623.910758] env[62730]: DEBUG nova.network.neutron [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Updating instance_info_cache with network_info: [{"id": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "address": "fa:16:3e:e0:ef:74", "network": {"id": "71ccf346-c204-416b-95fc-178308fd169b", "bridge": "br-int", "label": "tempest-ServersTestJSON-164070943-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6181e6d67e74692b11bddb3c1ed2779", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e65c8e6-d1", "ovs_interfaceid": "7e65c8e6-d138-4fd2-a1e4-6a14b3dd383e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.920355] env[62730]: DEBUG oslo_concurrency.lockutils [req-321b77a7-725e-4303-ae5d-717a88f3c1f0 req-dcb08e76-2af7-411e-b6eb-e7f9d4c9b7a8 service nova] Releasing lock "refresh_cache-842e4145-ba83-48d5-8514-78532381eb2d" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.737660] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.732579] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.736577] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.737632] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.737531] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.750251] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.750251] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.750251] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.750251] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1640.751377] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fec1b2-9562-4905-9b82-aa57924fff63 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.760518] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2563d8d4-a25c-4bdd-8b55-24b00aee8208 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.776021] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b157a9-d815-4f6c-92fc-62bfe07ecc32 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.783683] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e754264-024e-4baa-9061-168f9c4126b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.816561] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180528MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1640.816770] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.816916] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.894235] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 
actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.894235] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.894408] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.894530] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.894590] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.894708] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.895088] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.895088] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.895088] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.895247] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1640.906679] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1640.918033] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1640.929505] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1640.929748] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1640.929910] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '7', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '2', 'num_os_type_None': '10', 'num_proj_06d1cb82c61344ebb38e2ef9a6c95a6c': '1', 'io_workload': '10', 'num_task_deleting': '8', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1641.096413] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c533c647-a062-4056-afde-ab4a5de34b08 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.104825] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054fe3a6-d4a6-4948-a74b-a7c75e968645 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.135333] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e3610b-7b90-4a95-a160-1599c8b3fd1f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.143150] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8bb84d-6fb2-4794-8fac-5b1a1efa43c2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.156864] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.165108] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1641.180183] env[62730]: DEBUG nova.compute.resource_tracker [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1641.180374] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.180421] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.737941] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.738158] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1642.738331] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1642.760818] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.760979] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.765941] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766129] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766266] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766395] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766517] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766637] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766757] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.766887] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1642.767016] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1643.737748] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.738344] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.738738] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1664.868366] env[62730]: WARNING oslo_vmware.rw_handles [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1664.868366] env[62730]: ERROR oslo_vmware.rw_handles [ 1664.869108] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1664.870929] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1664.871209] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Copying Virtual Disk [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/198bcc34-b898-4f6f-a82a-5ed53437e131/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1664.871511] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec991867-915a-4103-ab76-d32cce0e6cfd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.882028] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for the task: (returnval){ [ 1664.882028] env[62730]: value 
= "task-4837239" [ 1664.882028] env[62730]: _type = "Task" [ 1664.882028] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.888216] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Task: {'id': task-4837239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.390775] env[62730]: DEBUG oslo_vmware.exceptions [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1665.391099] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.391677] env[62730]: ERROR nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1665.391677] env[62730]: Faults: ['InvalidArgument'] [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Traceback (most recent call last): [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] yield resources [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.driver.spawn(context, instance, image_meta, [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._fetch_image_if_missing(context, vi) [ 1665.391677] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] image_cache(vi, tmp_image_ds_loc) [ 1665.392119] env[62730]: 
ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] vm_util.copy_virtual_disk( [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] session._wait_for_task(vmdk_copy_task) [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.wait_for_task(task_ref) [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return evt.wait() [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] result = hub.switch() [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1665.392119] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.greenlet.switch() [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.f(*self.args, **self.kw) [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] raise exceptions.translate_fault(task_info.error) [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Faults: ['InvalidArgument'] [ 1665.392563] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] [ 1665.392563] env[62730]: INFO nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Terminating instance [ 1665.393575] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.393785] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.394039] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-202814c5-ec69-4d74-a7df-d69aec14403e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.396116] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.396280] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.396450] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1665.403862] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.404049] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1665.405284] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72c7eba1-a95d-4184-b8ac-978b97ba0f50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.413291] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for the task: (returnval){ [ 1665.413291] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e7df2f-9fa7-fc53-bb58-d1a1b4ba304e" [ 1665.413291] env[62730]: _type = "Task" [ 1665.413291] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.421371] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e7df2f-9fa7-fc53-bb58-d1a1b4ba304e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.448078] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1665.511347] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.520636] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Releasing lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.521051] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1665.521248] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1665.522361] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8a8e17-92aa-4a66-8af2-294fb138d5eb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.530780] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1665.531065] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3ecdfc8-848a-4731-a333-72f44fa7ed14 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.573929] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1665.574423] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1665.574423] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Deleting the datastore file [datastore2] f1b4e7a6-83d8-40c6-9886-2991e91fbc34 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1665.574720] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc957fe5-1e5f-4e06-a4a4-9df584107a92 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.582293] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for the task: (returnval){ [ 1665.582293] env[62730]: value = "task-4837241" [ 1665.582293] env[62730]: _type = "Task" [ 1665.582293] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.591018] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Task: {'id': task-4837241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.924734] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1665.925050] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Creating directory with path [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.925292] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7113e9a6-284b-40ed-8118-b295af0eaf95 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.938564] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Created directory with path [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.938723] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Fetch image to [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1665.938798] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1665.939665] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dd9550-2b27-4bad-954b-0b73d6c014e0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.947130] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1d8339-f055-47f4-adb9-a35f496f70b3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.956415] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e0707d-03db-43be-8625-2f22cbe46742 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.989830] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2560fa-0e76-40aa-a5ed-134d30826223 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.996507] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ea1957e2-154a-436c-8a1d-dd07d36c4721 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.024720] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1666.076275] env[62730]: DEBUG oslo_vmware.rw_handles [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1666.136438] env[62730]: DEBUG oslo_vmware.rw_handles [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1666.136666] env[62730]: DEBUG oslo_vmware.rw_handles [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1666.140890] env[62730]: DEBUG oslo_vmware.api [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Task: {'id': task-4837241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032833} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.141179] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1666.141335] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1666.141542] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1666.141682] env[62730]: INFO nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1666.141948] env[62730]: DEBUG oslo.service.loopingcall [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.142236] env[62730]: DEBUG nova.compute.manager [-] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1666.144526] env[62730]: DEBUG nova.compute.claims [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1666.144733] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.144979] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.357389] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a349a25-1c87-4091-8855-08325fc73883 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.365658] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40199dd8-6a34-49e1-ad72-d92fb1f301a5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.396345] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69289cea-2309-4ece-a2f6-8f6a7c6e0315 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.403678] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b4600c-8f27-4008-9651-101808d540f0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.416804] env[62730]: DEBUG nova.compute.provider_tree [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.425598] env[62730]: DEBUG nova.scheduler.client.report [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1666.440450] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 
tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.440976] env[62730]: ERROR nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1666.440976] env[62730]: Faults: ['InvalidArgument'] [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Traceback (most recent call last): [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.driver.spawn(context, instance, image_meta, [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._fetch_image_if_missing(context, vi) [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] image_cache(vi, tmp_image_ds_loc) [ 1666.440976] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] vm_util.copy_virtual_disk( [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] session._wait_for_task(vmdk_copy_task) [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.wait_for_task(task_ref) [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return evt.wait() [ 1666.441347] env[62730]: ERROR 
nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] result = hub.switch() [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.greenlet.switch() [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1666.441347] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.f(*self.args, **self.kw) [ 1666.441658] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1666.441658] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] raise exceptions.translate_fault(task_info.error) [ 1666.441658] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1666.441658] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Faults: ['InvalidArgument'] [ 1666.441658] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] [ 1666.441784] env[62730]: DEBUG nova.compute.utils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1666.443152] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Build of instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 was re-scheduled: A specified parameter was not correct: fileType [ 1666.443152] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1666.443524] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1666.443748] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.443891] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired lock 
"refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.444065] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1666.469861] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1666.530368] env[62730]: DEBUG nova.network.neutron [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.539263] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Releasing lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.539551] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1666.539768] env[62730]: DEBUG nova.compute.manager [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1666.623259] env[62730]: INFO nova.scheduler.client.report [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Deleted allocations for instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 [ 1666.644272] env[62730]: DEBUG oslo_concurrency.lockutils [None req-79010a34-198e-4012-8be7-4df8bcfa2db9 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.904s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.645457] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.184s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.645683] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.645906] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.646092] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.648061] env[62730]: INFO nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Terminating instance [ 1666.649590] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquiring lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.649754] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Acquired lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" 
[ 1666.649924] env[62730]: DEBUG nova.network.neutron [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1666.658798] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1666.676974] env[62730]: DEBUG nova.network.neutron [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1666.706627] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.706889] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.708776] env[62730]: INFO nova.compute.claims [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.739215] env[62730]: DEBUG nova.network.neutron [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.753677] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Releasing lock "refresh_cache-f1b4e7a6-83d8-40c6-9886-2991e91fbc34" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.754215] env[62730]: DEBUG nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Start destroying the instance on the hypervisor.
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1666.754421] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1666.754939] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ba3b24e-d7f9-446e-a95e-d1c5f4a899b5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.767842] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9585d5c6-c8de-4d84-a8f1-b60c38f1365f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.800496] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1b4e7a6-83d8-40c6-9886-2991e91fbc34 could not be found. [ 1666.800706] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1666.800886] env[62730]: INFO nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1666.801165] env[62730]: DEBUG oslo.service.loopingcall [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
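Annotation: the "Waiting for function ... _deallocate_network_with_retries to return." record is oslo.service's dynamic looping call, which Nova uses to retry network deallocation on transient failures. A rough sketch of such a wrapper with oslo.service's RetryDecorator follows; the retry counts, sleep times and exception type are illustrative assumptions, not Nova's configured values.

```python
# Rough sketch of a retried deallocation wrapper using oslo.service's
# RetryDecorator; counts, sleeps and the exception type are assumptions.
from oslo_service import loopingcall

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(ConnectionError,))
def deallocate_network_with_retries():
    # Body elided; in Nova this path ends in
    # network_api.deallocate_for_instance(). Only the exception types named
    # above are retried; anything else, such as the
    # NeutronAdminCredentialConfigurationInvalid raised below, escapes the
    # looping call on its first occurrence.
    pass
```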
[ 1666.803732] env[62730]: DEBUG nova.compute.manager [-] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1666.803837] env[62730]: DEBUG nova.network.neutron [-] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1666.897194] env[62730]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62730) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1666.897436] env[62730]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-ea7f870c-5d56-4283-af45-991cfd46436e'] [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1666.897976] env[62730]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall result = f(*args,
**kwargs) [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1666.898460] env[62730]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1666.899024] env[62730]: ERROR oslo.service.loopingcall [ 1666.899533] env[62730]: ERROR nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1666.934457] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6d78e5-d34f-4767-8c7a-cdf8157b2a4f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.939500] env[62730]: ERROR nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Traceback (most recent call last): [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] exception_handler_v20(status_code, error_body) [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] raise client_exc(message=error_message, [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Neutron server returns request_ids: ['req-ea7f870c-5d56-4283-af45-991cfd46436e'] [ 1666.939500] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] During handling of the above exception, another exception occurred: [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: 
f1b4e7a6-83d8-40c6-9886-2991e91fbc34] [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Traceback (most recent call last): [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._delete_instance(context, instance, bdms) [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._shutdown_instance(context, instance, bdms) [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._try_deallocate_network(context, instance, requested_networks) [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] with excutils.save_and_reraise_exception(): [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1666.939831] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.force_reraise() [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] raise self.value [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] _deallocate_network_with_retries() [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return evt.wait() [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] result = hub.switch() [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.greenlet.switch() [ 1666.940335] env[62730]: ERROR 
nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1666.940335] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] result = func(*self.args, **self.kw) [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] result = f(*args, **kwargs) [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._deallocate_network( [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self.network_api.deallocate_for_instance( [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] data = neutron.list_ports(**search_opts) [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.list('ports', self.ports_path, retrieve_all, [ 1666.940925] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] for r in self._pagination(collection, path, **params): [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] res = self.get(path, params=params) [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.941600] env[62730]: ERROR 
nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.retry_request("GET", action, body=body, [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1666.941600] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] return self.do_request(method, action, body=body, [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] ret = obj(*args, **kwargs) [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] self._handle_fault_response(status_code, replybody, resp) [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1666.942216] env[62730]: ERROR nova.compute.manager [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34]
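Annotation: both tracebacks above bottom out in the same 401 from Neutron, meaning Keystone rejected the service credentials Nova's Neutron client was configured with (the [neutron] section of nova.conf, as the first ERROR record already suggests). One way to test those credentials outside Nova is a bare keystoneauth session, as sketched below; every value shown is a placeholder to be filled from the same nova.conf, and none of them appears in this log.

```python
# Hypothetical standalone check of the service credentials from nova.conf's
# [neutron] section; every value here is a placeholder, not from this log.
from keystoneauth1 import session
from keystoneauth1.identity import v3

auth = v3.Password(
    auth_url="http://controller/identity/v3",  # [neutron] auth_url
    username="nova",                           # [neutron] username
    password="REDACTED",                       # [neutron] password
    project_name="service",                    # [neutron] project_name
    user_domain_name="Default",
    project_domain_name="Default",
)
sess = session.Session(auth=auth)
# With the same bad credentials this raises
# keystoneauth1.exceptions.http.Unauthorized, the 401 that Nova converts
# into NeutronAdminCredentialConfigurationInvalid in the records above.
print(sess.get_token())
```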
[ 1666.948266] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc08420-a7ed-47e7-a301-1a6ec5b3bd75 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.980790] env[62730]: DEBUG oslo_concurrency.lockutils [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Lock "f1b4e7a6-83d8-40c6-9886-2991e91fbc34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.335s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.982874] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb03161-083d-4cbb-b880-fa39397aacfd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.993866] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5618be3-9e41-4a87-b38a-6cb42016012e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.010052] env[62730]: DEBUG nova.compute.provider_tree [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.020183] env[62730]: DEBUG nova.scheduler.client.report [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1667.035891] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.329s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.036433] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Start building networks asynchronously for instance.
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1667.056816] env[62730]: INFO nova.compute.manager [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] [instance: f1b4e7a6-83d8-40c6-9886-2991e91fbc34] Successfully reverted task state from None on failure for instance. [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server [None req-956cb513-2777-4067-9432-0deb3e566f12 tempest-ServerShowV257Test-342034785 tempest-ServerShowV257Test-342034785-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-ea7f870c-5d56-4283-af45-991cfd46436e'] [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1667.060663] env[62730]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1667.061250] 
env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1667.061250] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1667.061722] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1667.062160] env[62730]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 
1667.062588] env[62730]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.062588] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1667.063042] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1667.063479] env[62730]: ERROR oslo_messaging.rpc.server [ 1667.077249] env[62730]: DEBUG nova.compute.utils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1667.078391] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1667.078571] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1667.091533] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1667.161519] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1667.166627] env[62730]: DEBUG nova.policy [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6b43819738c4aa39def73b950405e0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '350dbc45d12e4bd3a2bd888b484b3173', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1667.187345] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1667.187670] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1667.187853] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1667.188057] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1667.188214] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1667.188365] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1667.188578] env[62730]: DEBUG 
nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1667.188742] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1667.188910] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1667.189112] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1667.189275] env[62730]: DEBUG nova.virt.hardware [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1667.190206] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7b6d34-4402-4e9f-bc94-f781c031a0e5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.198403] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21accc5d-82dc-4451-8164-1e702ac3ff2a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.671451] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Successfully created port: 1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1668.791238] env[62730]: DEBUG nova.compute.manager [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Received event network-vif-plugged-1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1668.791238] env[62730]: DEBUG oslo_concurrency.lockutils [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] Acquiring lock "adc5639c-773e-4deb-9387-004833e94507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.791238] env[62730]: DEBUG oslo_concurrency.lockutils [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 
req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] Lock "adc5639c-773e-4deb-9387-004833e94507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.791799] env[62730]: DEBUG oslo_concurrency.lockutils [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] Lock "adc5639c-773e-4deb-9387-004833e94507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.792126] env[62730]: DEBUG nova.compute.manager [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] No waiting events found dispatching network-vif-plugged-1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1668.792989] env[62730]: WARNING nova.compute.manager [req-ea107591-3374-4b5e-9b41-6b9cb3f8e230 req-a0147767-2e19-4853-a84b-c2afd3069d92 service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Received unexpected event network-vif-plugged-1445af3a-a397-4188-803f-b804bdb8492b for instance with vm_state building and task_state spawning. [ 1668.793823] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Successfully updated port: 1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1668.810676] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.812745] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquired lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.812745] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1668.912495] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1669.205787] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Updating instance_info_cache with network_info: [{"id": "1445af3a-a397-4188-803f-b804bdb8492b", "address": "fa:16:3e:d8:a5:e3", "network": {"id": "c51b4874-b4c7-4499-9e4f-1adad22f12fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1063983689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "350dbc45d12e4bd3a2bd888b484b3173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1445af3a-a3", "ovs_interfaceid": "1445af3a-a397-4188-803f-b804bdb8492b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.216810] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Releasing lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.217129] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance network_info: |[{"id": "1445af3a-a397-4188-803f-b804bdb8492b", "address": "fa:16:3e:d8:a5:e3", "network": {"id": "c51b4874-b4c7-4499-9e4f-1adad22f12fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1063983689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "350dbc45d12e4bd3a2bd888b484b3173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1445af3a-a3", "ovs_interfaceid": "1445af3a-a397-4188-803f-b804bdb8492b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1669.217606] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:a5:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '140f4558-c11e-4af4-ab36-234e2d2f80a4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1445af3a-a397-4188-803f-b804bdb8492b', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1669.225538] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Creating folder: Project (350dbc45d12e4bd3a2bd888b484b3173). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1669.226124] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7febf53-61e4-45aa-8648-ac72deecde89 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.237639] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Created folder: Project (350dbc45d12e4bd3a2bd888b484b3173) in parent group-v942928. [ 1669.237843] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Creating folder: Instances. Parent ref: group-v943024. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1669.238114] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-daf69619-c384-43b8-b966-a272cc138eac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.247633] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Created folder: Instances in parent group-v943024. [ 1669.247892] env[62730]: DEBUG oslo.service.loopingcall [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
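The "Instance VIF info" record above shows how the driver condenses a Neutron port into the few fields the VMware layer needs: the bridge name, the MAC, the NSX logical-switch id wrapped as an OpaqueNetwork reference, and the port id kept as iface_id. A sketch of that mapping over one network_info entry shaped like the cache dump above; the helper name is hypothetical:

    def to_vif_info(vif):
        # vif: one entry of network_info, as dumped in the cache records above
        return {
            'network_name': vif['network']['bridge'],   # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',                     # per the record above
        }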
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1669.248107] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc5639c-773e-4deb-9387-004833e94507] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1669.248322] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eeac47ce-92ad-4226-b289-8de73d9e9d89 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.269277] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1669.269277] env[62730]: value = "task-4837244" [ 1669.269277] env[62730]: _type = "Task" [ 1669.269277] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.277775] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837244, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.780086] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837244, 'name': CreateVM_Task, 'duration_secs': 0.295041} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.780262] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adc5639c-773e-4deb-9387-004833e94507] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1669.780998] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.781241] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.781497] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.781784] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50dc6b20-321d-49ba-bad7-1510ea6692b6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.786556] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for the task: (returnval){ [ 1669.786556] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52433c15-eaf1-f702-fcd3-cf7e5e14e637" [ 1669.786556] env[62730]: _type = "Task" [ 1669.786556] env[62730]: } to complete. 
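CreateVM_Task moves from "progress is 0%" to "completed successfully" roughly half a second later because oslo.vmware polls the task state on a fixed interval (via its looping-call helper) rather than blocking in the SOAP request. The loop below reproduces that pattern in plain Python; get_task_state stands in for the PropertyCollector read the library actually performs:

    import time

    def wait_for_task(get_task_state, interval=0.5):
        # Poll until the vCenter task reaches a terminal state, mirroring
        # the progress-0% -> completed sequence in the records above.
        while True:
            state, result = get_task_state()
            if state == 'success':
                return result
            if state == 'error':
                raise RuntimeError(result)
            time.sleep(interval)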
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.794517] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52433c15-eaf1-f702-fcd3-cf7e5e14e637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.297312] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.297622] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.297844] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.825932] env[62730]: DEBUG nova.compute.manager [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Received event network-changed-1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1670.826220] env[62730]: DEBUG nova.compute.manager [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Refreshing instance network info cache due to event network-changed-1445af3a-a397-4188-803f-b804bdb8492b. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1670.826439] env[62730]: DEBUG oslo_concurrency.lockutils [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] Acquiring lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.826591] env[62730]: DEBUG oslo_concurrency.lockutils [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] Acquired lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.826760] env[62730]: DEBUG nova.network.neutron [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Refreshing network info cache for port 1445af3a-a397-4188-803f-b804bdb8492b {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1671.154082] env[62730]: DEBUG nova.network.neutron [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Updated VIF entry in instance network info cache for port 1445af3a-a397-4188-803f-b804bdb8492b. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1671.154540] env[62730]: DEBUG nova.network.neutron [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] [instance: adc5639c-773e-4deb-9387-004833e94507] Updating instance_info_cache with network_info: [{"id": "1445af3a-a397-4188-803f-b804bdb8492b", "address": "fa:16:3e:d8:a5:e3", "network": {"id": "c51b4874-b4c7-4499-9e4f-1adad22f12fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1063983689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "350dbc45d12e4bd3a2bd888b484b3173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1445af3a-a3", "ovs_interfaceid": "1445af3a-a397-4188-803f-b804bdb8492b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.166405] env[62730]: DEBUG oslo_concurrency.lockutils [req-92498f22-9fc2-4e8d-b952-42ca65da0c41 req-361d03b6-b4e4-408e-9893-1b4127f7737a service nova] Releasing lock "refresh_cache-adc5639c-773e-4deb-9387-004833e94507" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.488069] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock 
"842e4145-ba83-48d5-8514-78532381eb2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.737599] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1694.747070] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1694.747070] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1696.746605] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.733676] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.737342] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.737228] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.737695] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.749637] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.749851] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.750031] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.750195] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1701.751337] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f910b02-f0a4-4744-b9ee-b835136b418b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.760446] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9599739-e124-406d-b38f-7d345c54b1ac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.776301] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4abcba-2501-4248-aafc-effd42ecb16f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.783213] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6bce33-62f3-4d43-b98c-7b3755c83a74 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.812709] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180540MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1701.812867] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.813043] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.953241] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance e8657fe0-3db2-4768-817f-944a736da401 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.953426] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance d276dbe7-a0fc-4518-9006-a0d749c07984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.953579] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.953709] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.953830] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.953954] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.954095] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.954219] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.954338] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.954456] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1701.970982] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1701.981843] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1701.982086] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1701.982255] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '8', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_93039e316cca49179277828e04a9ce61': '1', 'io_workload': '10', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '2', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_task_spawning': '1', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1702.001543] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1702.017785] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1702.017785] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating inventory in ProviderTree for 
provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1702.030920] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1702.049880] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing trait associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1702.192938] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ada0e15-5beb-475f-a154-63475f24fe37 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.200869] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12717e00-52ec-4580-a981-4df7dfcc9327 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.231385] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2cab87-1732-49c1-82a9-70092553a0a7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.238711] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f766f984-8fe8-4def-9ec6-3e7e62d6ff15 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.252082] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.262341] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1702.276530] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1702.276729] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.464s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.272600] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.737534] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.737840] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1703.737991] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1703.758480] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: e8657fe0-3db2-4768-817f-944a736da401] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.758667] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.758812] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.758943] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759081] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759208] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759393] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759527] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759649] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759767] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1703.759887] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1703.760487] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1704.737977] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.737821] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.737821] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
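The audit's numbers above are internally consistent: each of the ten tracked instances holds {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}} in placement, and the final resource view reports used_ram=1792MB, used_disk=10GB, used_vcpus=10, where 512 MB of the RAM is the reserved overhead from the MEMORY_MB inventory. A quick check of that arithmetic:

    reserved_mb = 512                      # MEMORY_MB 'reserved' in the inventory
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    n = 10                                 # instances tracked in the audit

    assert reserved_mb + n * per_instance['MEMORY_MB'] == 1792   # used_ram (MB)
    assert n * per_instance['DISK_GB'] == 10                     # used_disk (GB)
    assert n * per_instance['VCPU'] == 10                        # used_vcpus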
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1711.998779] env[62730]: WARNING oslo_vmware.rw_handles [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1711.998779] env[62730]: ERROR oslo_vmware.rw_handles [ 1711.999584] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1712.001215] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1712.001472] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Copying Virtual Disk [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/53d2cfb4-6ad8-4c92-a35a-2fb1dc0f7174/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1712.001768] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a060fdb2-4f1e-4480-b3ab-9a795b5f095f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.010879] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 
tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for the task: (returnval){ [ 1712.010879] env[62730]: value = "task-4837245" [ 1712.010879] env[62730]: _type = "Task" [ 1712.010879] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.019305] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Task: {'id': task-4837245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.521801] env[62730]: DEBUG oslo_vmware.exceptions [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1712.522094] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.522668] env[62730]: ERROR nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1712.522668] env[62730]: Faults: ['InvalidArgument'] [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] Traceback (most recent call last): [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] yield resources [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self.driver.spawn(context, instance, image_meta, [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1712.522668] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self._fetch_image_if_missing(context, vi) [ 1712.522668] env[62730]: ERROR nova.compute.manager 
[instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] image_cache(vi, tmp_image_ds_loc) [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] vm_util.copy_virtual_disk( [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] session._wait_for_task(vmdk_copy_task) [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return self.wait_for_task(task_ref) [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return evt.wait() [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] result = hub.switch() [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1712.523129] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return self.greenlet.switch() [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self.f(*self.args, **self.kw) [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] raise exceptions.translate_fault(task_info.error) [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] Faults: ['InvalidArgument'] [ 1712.523480] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] [ 1712.523480] env[62730]: INFO nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 
tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Terminating instance [ 1712.525041] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.525041] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1712.525041] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63bb032b-cb0d-4813-b632-895fcc68d545 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.527423] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1712.527623] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1712.528415] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3537a4b-4537-4f04-a949-26a1f8ce0dab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.535551] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1712.535799] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-111b0432-6b5f-4967-8458-e8344f0059aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.537944] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1712.538134] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1712.539122] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68c62d05-1961-49e3-8abd-1d1fb5f97fdd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.544318] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1712.544318] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52457e44-2cc2-e045-2859-edeae81f67c0" [ 1712.544318] env[62730]: _type = "Task" [ 1712.544318] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.551350] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52457e44-2cc2-e045-2859-edeae81f67c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.604964] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1712.605222] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1712.605380] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Deleting the datastore file [datastore2] e8657fe0-3db2-4768-817f-944a736da401 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1712.605648] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c19e7fb2-1b53-4aa6-8e34-9d8e3509a4d7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.612715] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for the task: (returnval){ [ 1712.612715] env[62730]: value = "task-4837247" [ 1712.612715] env[62730]: _type = "Task" [ 1712.612715] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.620599] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Task: {'id': task-4837247, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.055738] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1713.056100] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1713.056377] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91c7067b-78da-42e5-a28c-3e3bebd8c228 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.068103] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1713.068306] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Fetch image to [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1713.068483] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1713.069249] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50ad325-6750-460d-8e7c-6221e4ec9f42 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.077099] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac86a7-f830-4051-8b24-c4d0dc5d8470 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.086069] env[62730]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d15d752-781c-4b9b-8d52-eaf192d8e50d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.119292] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f097a579-0d9e-49d5-964a-178b3d349864 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.127834] env[62730]: DEBUG oslo_vmware.api [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Task: {'id': task-4837247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070861} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.128365] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1713.128562] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1713.128740] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1713.128918] env[62730]: INFO nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Took 0.60 seconds to destroy the instance on the hypervisor. 
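The spawn failure above is oslo.vmware's generic fault path: _poll_task sees the CopyVirtualDisk_Task error, get_fault_class finds no specific exception registered for InvalidArgument ("Fault InvalidArgument not matched"), and a VimFaultException bubbles up through wait_for_task, after which the compute manager unregisters the VM and deletes its datastore files as shown. A minimal sketch of catching that exception around a task wait, assuming oslo.vmware is installed; session and task are whatever VMwareAPISession and task reference the caller already holds:

    from oslo_vmware import exceptions as vexc

    def copy_disk(session, task):
        """Wait on a vCenter disk-copy task, surfacing the fault list."""
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # For the failure above, e.fault_list == ['InvalidArgument'] and
            # the message reads "A specified parameter was not correct: fileType".
            print('disk copy failed, faults=%s' % e.fault_list)
            raise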
[ 1713.130626] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-013559df-01a3-432e-85ff-b5bec067e0d1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.132444] env[62730]: DEBUG nova.compute.claims [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1713.132618] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.132832] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.158340] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1713.219527] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1713.278021] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1713.278568] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1713.398377] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b902f81-0f2d-41a2-bfe3-82f44d1ecb08 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.406775] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3823810-3927-4fb0-8608-ab74af9519a7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.437424] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6174fd32-0d8e-4ab7-9a5d-da7895c2d730 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.445268] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d448c5a-7a12-45b7-958e-3ef4e6093885 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.458604] env[62730]: DEBUG nova.compute.provider_tree [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.467766] env[62730]: DEBUG nova.scheduler.client.report [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1713.484563] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.352s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.485265] env[62730]: ERROR nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1713.485265] env[62730]: Faults: ['InvalidArgument'] [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] Traceback (most recent call last): [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self.driver.spawn(context, instance, image_meta, [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self._fetch_image_if_missing(context, vi) [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] image_cache(vi, tmp_image_ds_loc) [ 1713.485265] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] vm_util.copy_virtual_disk( [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] session._wait_for_task(vmdk_copy_task) [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return self.wait_for_task(task_ref) [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return evt.wait() [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] result = hub.switch() [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] return self.greenlet.switch() [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1713.486051] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] self.f(*self.args, **self.kw) [ 1713.486862] 
env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1713.486862] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] raise exceptions.translate_fault(task_info.error) [ 1713.486862] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1713.486862] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] Faults: ['InvalidArgument'] [ 1713.486862] env[62730]: ERROR nova.compute.manager [instance: e8657fe0-3db2-4768-817f-944a736da401] [ 1713.486862] env[62730]: DEBUG nova.compute.utils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1713.487484] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Build of instance e8657fe0-3db2-4768-817f-944a736da401 was re-scheduled: A specified parameter was not correct: fileType [ 1713.487484] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1713.487935] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1713.488038] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1713.488219] env[62730]: DEBUG nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1713.488437] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1713.737826] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.738008] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1713.752170] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 1 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1713.752438] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 876523eb-d8f4-4e0a-b9c2-2d9c074e6817] Instance has had 0 of 5 cleanup attempts {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11320}} [ 1713.910256] env[62730]: DEBUG nova.network.neutron [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.922321] env[62730]: INFO nova.compute.manager [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Took 0.43 seconds to deallocate network for instance. 
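[Editorial sketch, not from the log: the records above trace Nova's failure path for instance e8657fe0 after the VimFaultException -- abort the resource claim, mark the build re-scheduled, then deallocate networking (VIF unplug is skipped because the virt driver advertises no unplug_vifs method). The function below is a loose, hypothetical condensation of that order of operations, not Nova's actual signatures.]

    def handle_failed_build(instance, exc, claim, scheduler, network_api):
        # 1. Release the "compute_resources" accounting held for this build.
        claim.abort()
        # 2. Hand the request back so the scheduler can retry elsewhere.
        scheduler.reschedule(instance, reason=str(exc))
        # 3. Drop the instance's ports and IPs; without an unplug_vifs driver
        #    method, Nova cannot tell whether VIFs also need unplugging.
        network_api.deallocate_for_instance(instance)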
[ 1714.039327] env[62730]: INFO nova.scheduler.client.report [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Deleted allocations for instance e8657fe0-3db2-4768-817f-944a736da401 [ 1714.060817] env[62730]: DEBUG oslo_concurrency.lockutils [None req-bc59b6bb-bc59-4019-9593-a5b07af6a375 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 623.204s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.061874] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 426.920s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.062103] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Acquiring lock "e8657fe0-3db2-4768-817f-944a736da401-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.062318] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.062489] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.064589] env[62730]: INFO nova.compute.manager [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Terminating instance [ 1714.066943] env[62730]: DEBUG nova.compute.manager [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Start destroying the instance on the hypervisor.
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1714.067189] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1714.067500] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c04ceb8-90d9-4065-a4e1-d485770fcc52 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.077123] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2574760-a7e9-4aa7-9cd3-d0bf3bfc0de4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.088887] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1714.114919] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e8657fe0-3db2-4768-817f-944a736da401 could not be found. [ 1714.115156] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1714.115373] env[62730]: INFO nova.compute.manager [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] [instance: e8657fe0-3db2-4768-817f-944a736da401] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1714.115633] env[62730]: DEBUG oslo.service.loopingcall [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.115874] env[62730]: DEBUG nova.compute.manager [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1714.115969] env[62730]: DEBUG nova.network.neutron [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1714.145357] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.145625] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.147159] env[62730]: INFO nova.compute.claims [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1714.150383] env[62730]: DEBUG nova.network.neutron [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.158480] env[62730]: INFO nova.compute.manager [-] [instance: e8657fe0-3db2-4768-817f-944a736da401] Took 0.04 seconds to deallocate network for instance. 
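[Editorial sketch, not from the log: the inventory dict reported for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 (above, and repeated just below) maps to schedulable capacity via the standard placement rule capacity = (total - reserved) * allocation_ratio; the snippet simply replays that arithmetic on the logged numbers.]

    # Resource-class entries copied from the logged inventory data.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200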
[ 1714.258678] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d758fd87-0689-4789-8a35-37cc02b91108 tempest-ImagesOneServerNegativeTestJSON-1586798878 tempest-ImagesOneServerNegativeTestJSON-1586798878-project-member] Lock "e8657fe0-3db2-4768-817f-944a736da401" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.197s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.354422] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbae74a-7eff-4c59-9f8e-aae1bd537e77 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.364115] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7002b559-8bb2-4b2a-8522-c030cfc1a99b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.394093] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b29491-0890-4978-b99e-6dd967c2cce8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.401888] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19725c70-6b0d-4691-b30b-87acaa9b0f25 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.415732] env[62730]: DEBUG nova.compute.provider_tree [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1714.427024] env[62730]: DEBUG nova.scheduler.client.report [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1714.443045] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.297s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.443505] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Start building networks asynchronously for instance.
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1714.479158] env[62730]: DEBUG nova.compute.utils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1714.480774] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1714.480951] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1714.489736] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1714.561202] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1714.587483] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=<?>,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-26T09:07:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1714.587750] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1714.587910] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1714.588109] env[62730]: DEBUG nova.virt.hardware [None
req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1714.588265] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1714.588457] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1714.588693] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1714.588858] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1714.589039] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1714.589249] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1714.589476] env[62730]: DEBUG nova.virt.hardware [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1714.590496] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1567219d-72a2-49a5-8416-657ace75112c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.598922] env[62730]: DEBUG nova.policy [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e92fe6107bb14b2190111d3ae1890a6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9f07569d97748e88c6a7840147de664', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize 
/opt/stack/nova/nova/policy.py:203}} [ 1714.601459] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83bc8ff-0cf3-4931-be8b-7ac6e49ca64d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.064203] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1715.088273] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 1715.088273] env[62730]: value = "domain-c8" [ 1715.088273] env[62730]: _type = "ClusterComputeResource" [ 1715.088273] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1715.088273] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1e0a9f-ce77-4307-9ded-2b8fb5b521a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.104515] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 9 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1715.104596] env[62730]: WARNING nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] While synchronizing instance power states, found 10 instances in the database and 9 instances on the hypervisor. [ 1715.104800] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid d276dbe7-a0fc-4518-9006-a0d749c07984 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 9c36edef-9792-4f26-88c0-94a07eb1f588 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid a5a39785-b18a-4d18-a0af-8b4065c354f2 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106022] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 8d18fd69-cdaf-470c-b942-cd00c66f45ea {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106236] env[62730]: DEBUG nova.compute.manager [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 842e4145-ba83-48d5-8514-78532381eb2d {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106236] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid adc5639c-773e-4deb-9387-004833e94507 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106236] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 4eeba36c-efe6-4050-953f-75669079a0e0 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1715.106768] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "d276dbe7-a0fc-4518-9006-a0d749c07984" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.106768] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.106961] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "9c36edef-9792-4f26-88c0-94a07eb1f588" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.107180] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.107414] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.107622] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.107818] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.108323] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "842e4145-ba83-48d5-8514-78532381eb2d" by
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.108323] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "adc5639c-773e-4deb-9387-004833e94507" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.108422] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "4eeba36c-efe6-4050-953f-75669079a0e0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.454276] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Successfully created port: 059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1716.336416] env[62730]: DEBUG nova.compute.manager [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Received event network-vif-plugged-059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1716.336690] env[62730]: DEBUG oslo_concurrency.lockutils [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] Acquiring lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.336836] env[62730]: DEBUG oslo_concurrency.lockutils [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] Lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.337009] env[62730]: DEBUG oslo_concurrency.lockutils [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] Lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.337190] env[62730]: DEBUG nova.compute.manager [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] No waiting events found dispatching network-vif-plugged-059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1716.337385] env[62730]: WARNING nova.compute.manager [req-0098a3a3-1091-4c43-b05f-d6fc16f8aa26 req-e487de52-00f1-483e-b962-0c3c4778404f service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Received unexpected event 
network-vif-plugged-059fa8d3-adfc-4ca6-9b21-2186907a73fc for instance with vm_state building and task_state spawning. [ 1716.365816] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Successfully updated port: 059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1716.376578] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.376811] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquired lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.377308] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1716.469686] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1716.739942] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Updating instance_info_cache with network_info: [{"id": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "address": "fa:16:3e:5e:cc:6d", "network": {"id": "3e51074d-8246-4cfc-b11c-23a471b2e1b1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010140024-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f07569d97748e88c6a7840147de664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059fa8d3-ad", "ovs_interfaceid": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.753653] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Releasing lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.753884] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance network_info: |[{"id": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "address": "fa:16:3e:5e:cc:6d", "network": {"id": "3e51074d-8246-4cfc-b11c-23a471b2e1b1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010140024-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f07569d97748e88c6a7840147de664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059fa8d3-ad", "ovs_interfaceid": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1716.754373] env[62730]: 
DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:cc:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '059fa8d3-adfc-4ca6-9b21-2186907a73fc', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.762324] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Creating folder: Project (c9f07569d97748e88c6a7840147de664). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1716.762920] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-675f4e35-3cfe-4276-be63-42e52abb2220 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.774955] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Created folder: Project (c9f07569d97748e88c6a7840147de664) in parent group-v942928. [ 1716.775501] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Creating folder: Instances. Parent ref: group-v943027. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1716.775894] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d29cd7b5-0051-4cdc-b99f-fbf0c3d9464c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.787201] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Created folder: Instances in parent group-v943027. [ 1716.787457] env[62730]: DEBUG oslo.service.loopingcall [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.787654] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1716.787869] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46133480-a045-4082-9a0b-24b324b3b8b0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.808774] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.808774] env[62730]: value = "task-4837250" [ 1716.808774] env[62730]: _type = "Task" [ 1716.808774] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.816894] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837250, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.319473] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837250, 'name': CreateVM_Task, 'duration_secs': 0.312432} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.319712] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1717.320400] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.320592] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.320922] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1717.321202] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12e155ad-967f-4991-a8bf-21f52fdf6204 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.326970] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for the task: (returnval){ [ 1717.326970] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]524bfc13-8a78-6f3a-3bdc-2ec73a49aa75" [ 1717.326970] env[62730]: _type = "Task" [ 1717.326970] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.336488] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]524bfc13-8a78-6f3a-3bdc-2ec73a49aa75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.838917] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.839124] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1717.839390] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.365528] env[62730]: DEBUG nova.compute.manager [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Received event network-changed-059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1718.365812] env[62730]: DEBUG nova.compute.manager [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Refreshing instance network info cache due to event network-changed-059fa8d3-adfc-4ca6-9b21-2186907a73fc. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1718.366064] env[62730]: DEBUG oslo_concurrency.lockutils [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] Acquiring lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.366064] env[62730]: DEBUG oslo_concurrency.lockutils [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] Acquired lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.366230] env[62730]: DEBUG nova.network.neutron [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Refreshing network info cache for port 059fa8d3-adfc-4ca6-9b21-2186907a73fc {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1719.019141] env[62730]: DEBUG nova.network.neutron [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Updated VIF entry in instance network info cache for port 059fa8d3-adfc-4ca6-9b21-2186907a73fc. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1719.019561] env[62730]: DEBUG nova.network.neutron [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Updating instance_info_cache with network_info: [{"id": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "address": "fa:16:3e:5e:cc:6d", "network": {"id": "3e51074d-8246-4cfc-b11c-23a471b2e1b1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010140024-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9f07569d97748e88c6a7840147de664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059fa8d3-ad", "ovs_interfaceid": "059fa8d3-adfc-4ca6-9b21-2186907a73fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.030024] env[62730]: DEBUG oslo_concurrency.lockutils [req-7666d801-cfb5-4648-83ea-d2e6691bb6b8 req-4e6b3625-7e80-454f-beef-768230bbb02b service nova] Releasing lock "refresh_cache-4eeba36c-efe6-4050-953f-75669079a0e0" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.570438] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.570762] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.700250] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "344fc477-d506-43bf-9fc7-e03889a43202" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.700493] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock
"344fc477-d506-43bf-9fc7-e03889a43202" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.782096] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.375843] env[62730]: WARNING oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1758.375843] env[62730]: ERROR oslo_vmware.rw_handles [ 1758.376584] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1758.379098] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1758.379437] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Copying Virtual Disk [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/0fb143ea-c631-4af4-9d30-ee0d70ee7d68/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1758.379787] 
env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-895f2927-8455-47e7-9438-b2af181d6832 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.388987] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1758.388987] env[62730]: value = "task-4837251" [ 1758.388987] env[62730]: _type = "Task" [ 1758.388987] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.399730] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.733039] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.901028] env[62730]: DEBUG oslo_vmware.exceptions [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1758.901169] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.901747] env[62730]: ERROR nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1758.901747] env[62730]: Faults: ['InvalidArgument'] [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Traceback (most recent call last): [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] yield resources [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self.driver.spawn(context, instance, image_meta, [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self._fetch_image_if_missing(context, vi) [ 1758.901747] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] image_cache(vi, tmp_image_ds_loc) [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] vm_util.copy_virtual_disk( [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] session._wait_for_task(vmdk_copy_task) [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return self.wait_for_task(task_ref) [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return evt.wait() [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] result = hub.switch() [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1758.902186] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return self.greenlet.switch() [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self.f(*self.args, **self.kw) [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] raise exceptions.translate_fault(task_info.error) [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: 
d276dbe7-a0fc-4518-9006-a0d749c07984] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Faults: ['InvalidArgument'] [ 1758.902602] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] [ 1758.902602] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Terminating instance [ 1758.903808] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.904038] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1758.904298] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be16e5e7-01b2-44f9-839e-7442d1e3c9ad {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.907081] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1758.907287] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1758.908087] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8257dd88-39b8-43cc-b43b-57017c478361 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.916393] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1758.916677] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1bb5895-1882-4f9a-ac8e-0ec16c41c6f7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.919418] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1758.919615] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1758.920750] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4767866a-9adc-42b6-8245-feb9ef2fdb5f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.926831] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1758.926831] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]521f5044-a344-1ae5-36f2-3279f51269cc" [ 1758.926831] env[62730]: _type = "Task" [ 1758.926831] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.935230] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]521f5044-a344-1ae5-36f2-3279f51269cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.992092] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1758.992092] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1758.992092] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleting the datastore file [datastore2] d276dbe7-a0fc-4518-9006-a0d749c07984 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1758.992420] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c45a09-2aa2-4403-8289-472cd99869c5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.001021] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1759.001021] env[62730]: value = "task-4837253" [ 1759.001021] env[62730]: _type = "Task" [ 1759.001021] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.010483] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837253, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.438399] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1759.438774] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating directory with path [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1759.438924] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-539f752b-8dac-4147-8222-adf01d957342 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.451619] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Created directory with path [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1759.451834] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Fetch image to [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1759.452015] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1759.452807] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415ca075-dcd2-4160-af82-922e2dc2950d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.460734] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e1c852-fe1e-4425-95f6-97073a1bcb74 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.470734] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0619b597-4789-492a-9334-771e9f858666 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.505455] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202086a5-c2fb-425a-8a8f-7ce83229cbac 
{{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.514985] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082012} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.516704] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1759.516895] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1759.517102] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1759.517296] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Took 0.61 seconds to destroy the instance on the hypervisor. 
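The entries above trace a complete failure cycle for instance d276dbe7-a0fc-4518-9006-a0d749c07984: the image write handle closes after a RemoteDisconnected, CopyVirtualDisk_Task is submitted and polled until vCenter returns the InvalidArgument 'fileType' fault, the fault is translated into a VimFaultException, and the half-built VM is unregistered and its datastore files deleted. A minimal sketch of the poll-and-translate pattern these wait_for_task/_poll_task lines follow (illustrative names and dict shape only, not the actual oslo_vmware implementation):

    import time

    class VimFaultException(Exception):
        # stand-in for oslo_vmware.exceptions.VimFaultException (assumed shape)
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task, interval=0.5):
        # poll_task() is assumed to return e.g. {'state': 'running', 'progress': 0}
        # or {'state': 'error', 'error': '...', 'faults': ['InvalidArgument']}
        while True:
            info = poll_task()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # mirrors the log's "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultException(info.get('faults', []), info['error'])
            time.sleep(interval)  # one "progress is N%" line per iteration

The roughly half-second spacing between successive "progress is 0%" entries for the same task id is consistent with a fixed polling interval of this kind.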
[ 1759.519444] env[62730]: DEBUG nova.compute.claims [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1759.519617] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.519852] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.522538] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-afb3630c-ec38-4662-b192-30a42b6b08b7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.622155] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1759.676174] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1759.736335] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1759.736335] env[62730]: DEBUG oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1759.826332] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc410e2a-9e37-4a0b-97fd-8be49b2172f9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.835298] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c452c5f-003e-44be-abe8-3316bc85addb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.867779] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6f4710-40d5-47ff-a91c-42d49eaadfac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.876428] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72685cf5-521d-4f88-bce5-55c51be28cdc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.890816] env[62730]: DEBUG nova.compute.provider_tree [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.901051] env[62730]: DEBUG nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1759.916831] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.397s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.917384] env[62730]: ERROR nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1759.917384] env[62730]: Faults: ['InvalidArgument'] [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Traceback (most recent call last): [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1759.917384] env[62730]: ERROR 
nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self.driver.spawn(context, instance, image_meta, [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self._fetch_image_if_missing(context, vi) [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] image_cache(vi, tmp_image_ds_loc) [ 1759.917384] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] vm_util.copy_virtual_disk( [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] session._wait_for_task(vmdk_copy_task) [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return self.wait_for_task(task_ref) [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return evt.wait() [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] result = hub.switch() [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] return self.greenlet.switch() [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1759.917770] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] self.f(*self.args, **self.kw) [ 1759.918241] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1759.918241] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] raise exceptions.translate_fault(task_info.error) [ 1759.918241] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1759.918241] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Faults: ['InvalidArgument'] [ 1759.918241] env[62730]: ERROR nova.compute.manager [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] [ 1759.918241] env[62730]: DEBUG nova.compute.utils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1759.922313] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Build of instance d276dbe7-a0fc-4518-9006-a0d749c07984 was re-scheduled: A specified parameter was not correct: fileType [ 1759.922313] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1759.922313] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1759.922313] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1759.922313] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1759.922651] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1760.393035] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.408540] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Took 0.49 seconds to deallocate network for instance. [ 1760.521745] env[62730]: INFO nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted allocations for instance d276dbe7-a0fc-4518-9006-a0d749c07984 [ 1760.545558] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 618.469s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.546851] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 422.947s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.547189] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.547397] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.547571] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.549956] env[62730]: INFO nova.compute.manager [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Terminating instance [ 1760.551880] env[62730]: DEBUG nova.compute.manager [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1760.552090] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1760.552368] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7acde36-234d-46b1-976d-7fda983b5df5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.562353] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bbe423-f1d1-40cc-8e57-85942e15456a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.573534] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1760.596975] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d276dbe7-a0fc-4518-9006-a0d749c07984 could not be found. [ 1760.597223] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1760.597410] env[62730]: INFO nova.compute.manager [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Took 0.05 seconds to destroy the instance on the hypervisor.
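Every lock in these entries follows the oslo_concurrency.lockutils discipline: one process-wide lock per name (an instance UUID, "compute_resources", a "-events" suffix for the instance event dict), with the time spent waiting for and holding the lock reported on either side of the critical section. A stdlib sketch of that shape (the registry and message format here are assumptions for illustration, not lockutils internals):

    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)  # one shared lock object per name

    @contextmanager
    def synchronized(name, owner):
        start = time.monotonic()
        with _locks[name]:
            waited = time.monotonic() - start
            print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
            acquired = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - acquired
                print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # e.g. the per-instance lock held across do_terminate_instance:
    with synchronized("d276dbe7-a0fc-4518-9006-a0d749c07984", "do_terminate_instance"):
        pass  # destroy the instance while no other request can touch it

The 422.947s wait logged above is exactly this pattern at work: the terminate request queued behind the build request on the same per-instance lock name.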
[ 1760.597664] env[62730]: DEBUG oslo.service.loopingcall [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1760.597930] env[62730]: DEBUG nova.compute.manager [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1760.598039] env[62730]: DEBUG nova.network.neutron [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1760.623424] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.623680] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.625170] env[62730]: INFO nova.compute.claims [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1760.628212] env[62730]: DEBUG nova.network.neutron [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.636934] env[62730]: INFO nova.compute.manager [-] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] Took 0.04 seconds to deallocate network for instance.
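The loopingcall entry above shows that network deallocation is not a single call: _try_deallocate_network wraps _deallocate_network_with_retries, and oslo.service.loopingcall waits for it to return, retrying on transient Neutron failures rather than failing the teardown outright. A compact sketch of that retry shape (attempt count and delay are illustrative, not the values Nova uses):

    import time

    def call_with_retries(fn, max_retries=3, delay=2.0, retry_on=(Exception,)):
        # run fn(), retrying on the given exceptions up to max_retries times
        attempt = 0
        while True:
            try:
                return fn()
            except retry_on:
                attempt += 1
                if attempt > max_retries:
                    raise  # exhausted: surface the failure to the caller
                time.sleep(delay)  # back off before the next attempt

    # usage: call_with_retries(lambda: deallocate_for_instance(instance)),
    # where deallocate_for_instance is whatever actually talks to Neutron.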
[ 1760.736845] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.737108] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.771480] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b3b313e6-2e51-48d7-887d-f5cf0f3c07e1 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.225s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.772455] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 45.666s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.772651] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: d276dbe7-a0fc-4518-9006-a0d749c07984] During sync_power_state the instance has a pending task (deleting). Skip. [ 1760.772823] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "d276dbe7-a0fc-4518-9006-a0d749c07984" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.891553] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7d9b97-b38b-4138-82e4-9a7a2fcfb43a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.900239] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d98a4be-a77e-48d2-ae02-c591e89d87d6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.932912] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3bf272-be5e-4b0e-ad4b-befabc63c8ab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.941204] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c22eaf8-f99f-4b6c-820b-eeed1885ba62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.955249] env[62730]: DEBUG nova.compute.provider_tree [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1760.964314] env[62730]: DEBUG nova.scheduler.client.report [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1760.979617] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.356s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.980186] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1761.028423] env[62730]: DEBUG nova.compute.utils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1761.030116] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1761.030261] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1761.042284] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1761.116189] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1761.133408] env[62730]: DEBUG nova.policy [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b22447bf6ab4e93a4450b13d7d9a3a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '861b7ee6cc2444678f4056271d23e872', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1761.144218] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1761.144469] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1761.144628] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1761.144809] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1761.144955] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1761.145121] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1761.145336] env[62730]: DEBUG nova.virt.hardware [None 
req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1761.145495] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1761.145663] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1761.146081] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1761.146081] env[62730]: DEBUG nova.virt.hardware [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1761.146929] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d94e82-3e19-41e3-a260-ff9f91859a94 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.156008] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b0ecd3-5c27-439d-9b09-d38ccc8a15b8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.653629] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Successfully created port: 79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.737685] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1761.748840] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.749226] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.749554] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.749803] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1761.751340] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dc3328-e5f6-4e07-af06-9589f2a5b0dd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.762347] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0f39c1-9254-4b33-a645-e8316e0fb3c8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.779116] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae81f420-b1f9-4b0a-ac3f-db55669c6805 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.787312] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478021cc-1e0c-4c8e-b815-c13389adcd3c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.819022] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180522MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1761.819226] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.819442] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.904765] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
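
A few entries back, nova.virt.hardware walked a 1-vCPU flavor with wide-open limits (65536 sockets/cores/threads) down to the single candidate topology 1:1:1. A toy enumeration of that search space, assuming any (sockets, cores, threads) factorization of the vCPU count within the maxima is viable; the names mirror the log, but this is not Nova's implementation:

```python
# Sketch of the CPU-topology enumeration logged above: every (sockets,
# cores, threads) split whose product equals the vCPU count is a candidate.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every factorization of vcpus that respects the maxima."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus // s, max_cores) + 1):
            for t in range(1, min(vcpus // (s * c), max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single result above
print(len(list(possible_topologies(4))))
# 6 -- 1x1x4, 1x2x2, 1x4x1, 2x1x2, 2x2x1, 4x1x1
```
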
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.904765] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.904945] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.904990] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905244] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905314] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905376] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905496] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905613] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.905727] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1761.917686] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1761.930343] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1761.930548] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1761.931115] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '10', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_7d775e3135484ed8b81c9d2991f2bedb': '1', 'io_workload': '10', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_task_spawning': '3', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1762.107612] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61eb3822-14ba-4394-a4f7-bc8b8a90c725 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.115342] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec6c36c-2560-4757-9be7-3725581afe73 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.147464] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-356b3913-331e-4caf-a9b2-7d2f75b145de {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.156139] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92f42c8-46cc-4cae-8302-7eeef3d488b4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.170720] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.180038] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1762.198992] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1762.198992] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.379s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.031123] env[62730]: DEBUG nova.compute.manager [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Received event network-vif-plugged-79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1763.031420] env[62730]: DEBUG oslo_concurrency.lockutils [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] Acquiring lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.031592] env[62730]: DEBUG oslo_concurrency.lockutils [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.031766] env[62730]: DEBUG oslo_concurrency.lockutils [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.031890] env[62730]: DEBUG nova.compute.manager [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] No waiting events found dispatching network-vif-plugged-79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1763.032111] env[62730]: WARNING nova.compute.manager [req-3cb5fa65-4541-423d-a123-ddbb7f7cf575 req-6278bd45-4238-4d20-b4bf-73f1e24cea9d service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Received unexpected event network-vif-plugged-79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 for instance with vm_state building and task_state spawning. [ 1763.051790] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Successfully updated port: 79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.065888] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.067027] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.067027] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1763.150393] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance cache missing network info. 
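
The WARNING above is the visible half of Nova's instance-event handshake: Neutron reported network-vif-plugged, but no waiter had registered for that event yet (the VM is still building), so the event is popped, found unclaimed, and logged as unexpected rather than treated as an error. A minimal registry sketch of that pattern; this InstanceEvents is a stand-in for the real class in nova/compute/manager.py:

```python
# Toy version of the waiter registry behind pop_instance_event.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters: dict[tuple[str, str], threading.Event] = {}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid: str, event: str) -> threading.Event:
        """A thread about to block on an event registers interest first."""
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event)] = ev
            return ev

    def pop(self, instance_uuid: str, event: str):
        """The external-event handler claims a waiter, if any."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event), None)

events = InstanceEvents()
if events.pop("c1dcad10", "network-vif-plugged") is None:
    print("Received unexpected event")    # the WARNING case seen above

waiter = events.prepare("c1dcad10", "network-vif-plugged")
events.pop("c1dcad10", "network-vif-plugged").set()
print(waiter.is_set())                    # True: the expected-event case
```
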
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1763.341233] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Updating instance_info_cache with network_info: [{"id": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "address": "fa:16:3e:de:29:87", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79520a2d-68", "ovs_interfaceid": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.355865] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Releasing lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.356231] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance network_info: |[{"id": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "address": "fa:16:3e:de:29:87", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79520a2d-68", "ovs_interfaceid": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1763.357015] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:29:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1763.364941] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Creating folder: Project (861b7ee6cc2444678f4056271d23e872). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1763.365613] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c21dbdff-2a3a-4fc3-bbea-d23c70b93999 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.376392] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Created folder: Project (861b7ee6cc2444678f4056271d23e872) in parent group-v942928. [ 1763.376612] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Creating folder: Instances. Parent ref: group-v943030. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1763.376809] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a45387e-2d56-4d16-9cc3-afe8431567c6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.386991] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Created folder: Instances in parent group-v943030. [ 1763.387287] env[62730]: DEBUG oslo.service.loopingcall [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.387490] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1763.387701] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e570ef5b-c8f5-4e75-b78d-4ca1728869e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.407411] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1763.407411] env[62730]: value = "task-4837256" [ 1763.407411] env[62730]: _type = "Task" [ 1763.407411] env[62730]: } to complete. 
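
The "Instance VIF info" entry at the top of this block is a straight projection of the neutron port cached just before it: the NSX logical-switch id becomes an opaque-network reference, and the port id rides along as iface_id. A sketch of that mapping using the values from the log; the helper is illustrative, not the vmwareapi driver's actual function:

```python
# Sketch: deriving the VIF info logged above from one neutron network_info
# entry. Keys mirror the log; the helper itself is hypothetical.
def vif_info_from_network_info(vif: dict) -> dict:
    return {
        "network_name": vif["network"]["bridge"],           # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                              # the neutron port id
        "vif_model": "vmxnet3",                             # from the image name
    }

port = {
    "id": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2",
    "address": "fa:16:3e:de:29:87",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64"},
}
print(vif_info_from_network_info(port))
```
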
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.415515] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837256, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.918172] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837256, 'name': CreateVM_Task, 'duration_secs': 0.32278} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.918392] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1763.919107] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.919302] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.919635] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1763.919889] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dfb8d19-bf6d-41d5-863c-a47df09004e8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.925017] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for the task: (returnval){ [ 1763.925017] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52c1e139-d0eb-4d7f-c9e4-1cd57bcbf0a6" [ 1763.925017] env[62730]: _type = "Task" [ 1763.925017] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.934116] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52c1e139-d0eb-4d7f-c9e4-1cd57bcbf0a6, 'name': SearchDatastore_Task} progress is 0%. 
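
The "Waiting for the task ... progress is 0% ... completed successfully" sequence around CreateVM_Task above is a plain poll loop over the vSphere task object. A self-contained sketch of that loop; the Task class here fakes the server-side object that oslo.vmware really polls, and the print mimics the progress lines in this log:

```python
# Sketch of the task-polling loop behind wait_for_task, with a fake Task
# standing in for the vSphere task object.
import time

class Task:
    def __init__(self, duration_secs: float):
        self._deadline = time.monotonic() + duration_secs
    def info(self) -> dict:
        if time.monotonic() >= self._deadline:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": 0}

def wait_for_task(task: Task, interval: float = 0.5) -> dict:
    while True:
        info = task.info()
        print(f"progress is {info['progress']}%")
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)           # poll again, as the log shows

wait_for_task(Task(duration_secs=0.32))   # ~the 0.32278s CreateVM_Task above
```
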
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.198650] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1764.435951] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.436155] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1764.436381] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.058682] env[62730]: DEBUG nova.compute.manager [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Received event network-changed-79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1765.058915] env[62730]: DEBUG nova.compute.manager [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Refreshing instance network info cache due to event network-changed-79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2. 
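
"Processing image", together with the lock taken on the cached .vmdk path above, is the cache-or-fetch step: serialize on the per-image cache entry, download only if it is absent, and let every later instance clone from the cache. A sketch of the pattern under those assumptions; the paths and both helpers are illustrative, not Nova's API:

```python
# Sketch of the fetch-image-if-missing pattern visible above.
import os
import threading

_cache_lock = threading.Lock()

def download(image_id: str, dest: str) -> None:
    with open(dest, "wb") as f:          # stand-in for the Glance transfer
        f.write(b"...image bytes...")

def fetch_image_if_missing(image_id: str,
                           cache_dir: str = "/tmp/devstack-image-cache_base") -> str:
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    with _cache_lock:                    # one fetch per image at a time
        if not os.path.exists(cached):   # cache miss: fetch exactly once
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            download(image_id, cached)
    return cached                        # later instances clone from here

print(fetch_image_if_missing("a46adab9-3ef5-4b2e-8d44-bab77576ed71"))
```
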
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1765.059181] env[62730]: DEBUG oslo_concurrency.lockutils [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] Acquiring lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.059337] env[62730]: DEBUG oslo_concurrency.lockutils [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] Acquired lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.059497] env[62730]: DEBUG nova.network.neutron [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Refreshing network info cache for port 79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1765.442551] env[62730]: DEBUG nova.network.neutron [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Updated VIF entry in instance network info cache for port 79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1765.442931] env[62730]: DEBUG nova.network.neutron [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Updating instance_info_cache with network_info: [{"id": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "address": "fa:16:3e:de:29:87", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79520a2d-68", "ovs_interfaceid": "79520a2d-68a9-4f9b-8e60-4a2f10f1a6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.453954] env[62730]: DEBUG oslo_concurrency.lockutils [req-cf837746-a2b1-4241-b54c-a4a478fc8ad6 req-5a3cd660-578e-47bd-b70f-6a21f790367b service nova] Releasing lock "refresh_cache-c1dcad10-0c5a-4aca-8870-42569cfd4448" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.738186] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.738423] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1765.738516] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1765.766277] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.766509] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.766737] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.766950] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.767189] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.767418] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.767632] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.767861] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.768100] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. 
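
The run of "Skipping network cache update ... because it is Building" entries is the triage step of the _heal_instance_info_cache periodic task: instances that have not finished building have no stable network info to refresh, so they are filtered out before any Neutron call is made. A compact sketch of that filter (illustrative, not Nova's code):

```python
# Sketch of the heal-cache triage above: skip instances still building.
def instances_to_heal(instances):
    """Yield only instances whose network info is worth refreshing."""
    for inst in instances:
        if inst["vm_state"] == "building":
            print(f"Skipping network cache update for {inst['uuid']}: Building")
            continue
        yield inst

insts = [{"uuid": "c1dcad10", "vm_state": "building"},
         {"uuid": "9c36edef", "vm_state": "active"}]
print([i["uuid"] for i in instances_to_heal(insts)])   # ['9c36edef']
```
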
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.768294] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1765.768504] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1766.737999] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.737630] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.738034] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1779.685282] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "adc5639c-773e-4deb-9387-004833e94507" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.636131] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "5b182a44-2add-42f6-913d-14c5379e76be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.636131] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Lock "5b182a44-2add-42f6-913d-14c5379e76be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.295195] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "4eeba36c-efe6-4050-953f-75669079a0e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.355392] env[62730]: WARNING oslo_vmware.rw_handles [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Error occurred 
while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1805.355392] env[62730]: ERROR oslo_vmware.rw_handles
[ 1805.356121] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1805.357867] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1805.358158] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Copying Virtual Disk [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/f0adc971-0caa-4aef-aa22-c04da344ebab/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1805.358482] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d725daa-74ee-43d1-93ab-d6fe7677b33b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1805.367779] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){
[ 1805.367779] env[62730]: value = "task-4837257"
[ 1805.367779] env[62730]: _type = "Task"
[ 1805.367779] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
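
The rw_handles traceback above is worth a note: it fires while closing the HTTP write handle after the image transfer, when the ESX side has already dropped the connection, and the very next entry shows the download nevertheless completed. A sketch of tolerating that condition on close, assuming stdlib http.client semantics (the fact that the log records only a WARNING suggests oslo.vmware treats it as non-fatal too):

```python
# Sketch: ignore a peer that hung up after the transfer already finished.
import http.client

def close_quietly(conn: http.client.HTTPConnection) -> None:
    """Drain the final response, tolerating an already-closed far end."""
    try:
        conn.getresponse()
    except http.client.RemoteDisconnected:
        pass          # transfer done; the disconnect is harmless here
    finally:
        conn.close()
```
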
[ 1805.376483] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837257, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1805.878811] env[62730]: DEBUG oslo_vmware.exceptions [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1805.879108] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1805.879675] env[62730]: ERROR nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1805.879675] env[62730]: Faults: ['InvalidArgument']
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Traceback (most recent call last):
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     yield resources
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     self.driver.spawn(context, instance, image_meta,
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     self._fetch_image_if_missing(context, vi)
[ 1805.879675] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     image_cache(vi, tmp_image_ds_loc)
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     vm_util.copy_virtual_disk(
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     session._wait_for_task(vmdk_copy_task)
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     return self.wait_for_task(task_ref)
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     return evt.wait()
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     result = hub.switch()
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1805.880124] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     return self.greenlet.switch()
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     self.f(*self.args, **self.kw)
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]     raise exceptions.translate_fault(task_info.error)
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Faults: ['InvalidArgument']
[ 1805.880556] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]
[ 1805.880556] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Terminating instance
[ 1805.881620] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.881831] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1805.882095] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98092e47-e32b-4870-8efc-1ccb630afaf0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.884498] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1805.884708] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1805.885472] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a5bb66-5cdd-4fd4-b167-3d02a606d579 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.892765] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1805.893067] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be5f1dca-5d55-4350-9ed6-86163e3e2aef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.895478] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1805.895661] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1805.896672] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43aa4fac-af82-4917-853f-5eae4adacc2b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.901819] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for the task: (returnval){ [ 1805.901819] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]524fc09f-7f56-1a53-d078-60ca994c14f4" [ 1805.901819] env[62730]: _type = "Task" [ 1805.901819] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.909603] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]524fc09f-7f56-1a53-d078-60ca994c14f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.976555] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1805.976822] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1805.977053] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleting the datastore file [datastore2] 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1805.977374] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d96c6f92-53b3-4d94-9133-1aaa0fb46135 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.985870] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for the task: (returnval){ [ 1805.985870] env[62730]: value = "task-4837259" [ 1805.985870] env[62730]: _type = "Task" [ 1805.985870] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.994792] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837259, 'name': DeleteDatastoreFile_Task} progress is 0%. 
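
The teardown interleaved through these entries follows a fixed order: unregister the VM from vCenter inventory first, then delete its datastore directory, so the files are never removed out from under a registered VM. A sketch of that sequence; invoke is a stand-in for the SOAP calls logged here, not an oslo.vmware API:

```python
# Sketch of the destroy ordering visible above: UnregisterVM, then delete.
def destroy(invoke, vm_ref: str, ds_path: str) -> None:
    invoke("UnregisterVM", vm_ref)                 # drop from inventory first
    invoke("DeleteDatastoreFile_Task", ds_path)    # then remove its files

def fake_invoke(method: str, arg: str) -> None:    # stand-in SOAP client
    print(f"Invoking {method} on {arg}")

destroy(fake_invoke, "vm-123",
        "[datastore2] 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a")
```
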
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.413308] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1806.413648] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Creating directory with path [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1806.413764] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6043f2b0-b8ec-4db2-8ae2-861ac171c15b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.425716] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Created directory with path [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1806.425931] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Fetch image to [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1806.426077] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1806.426848] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1f3072-1c6b-4663-9454-b9b1d4a7fd42 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.434176] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5ece20-9e21-4c8d-80d5-d6aeb8e82882 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.444102] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36306d9-afaf-4a0f-8e4c-5745cbf384db {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.474895] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4177328a-9b0f-4b4a-92fb-56d05d40c794 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.481757] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b88ef3d9-fa61-465e-8dc7-0b91ec8387e7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.495952] env[62730]: DEBUG oslo_vmware.api [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Task: {'id': task-4837259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071978} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.496224] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1806.496406] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1806.496575] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1806.496750] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1806.498936] env[62730]: DEBUG nova.compute.claims [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1806.499139] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1806.499357] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1806.504015] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1806.643240] env[62730]: DEBUG oslo_vmware.rw_handles [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1806.702891] env[62730]: DEBUG oslo_vmware.rw_handles [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1806.703103] env[62730]: DEBUG oslo_vmware.rw_handles [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1806.757009] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694b1cde-2bcb-4a2a-a019-6a05cf70442f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1806.765268] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e452088a-22dc-48f7-ab70-5324747cbf18 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1806.796894] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ee1df2-f2d2-4040-b7bb-4e5151f82db9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1806.805245] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d98ed4e-0683-4237-b408-83005e3e31e2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1806.821360] env[62730]: DEBUG nova.compute.provider_tree [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1806.832193] env[62730]: DEBUG nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1806.845980] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1806.846543] env[62730]: ERROR nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1806.846543] env[62730]: Faults: ['InvalidArgument']
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Traceback (most recent call last):
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] self.driver.spawn(context, instance, image_meta,
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] self._fetch_image_if_missing(context, vi)
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] image_cache(vi, tmp_image_ds_loc)
[ 1806.846543] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] vm_util.copy_virtual_disk(
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] session._wait_for_task(vmdk_copy_task)
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] return self.wait_for_task(task_ref)
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] return evt.wait()
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] result = hub.switch()
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] return self.greenlet.switch()
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1806.846993] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] self.f(*self.args, **self.kw)
[ 1806.847412] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1806.847412] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] raise exceptions.translate_fault(task_info.error)
[ 1806.847412] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1806.847412] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Faults: ['InvalidArgument']
[ 1806.847412] env[62730]: ERROR nova.compute.manager [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a]
[ 1806.847412] env[62730]: DEBUG nova.compute.utils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1806.848716] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Build of instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a was re-scheduled: A specified parameter was not correct: fileType
[ 1806.848716] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1806.849176] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1806.849355] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1806.849529] env[62730]: DEBUG nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1806.849695] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1807.220260] env[62730]: DEBUG nova.network.neutron [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1807.233579] env[62730]: INFO nova.compute.manager [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Took 0.38 seconds to deallocate network for instance.
[ 1807.350663] env[62730]: INFO nova.scheduler.client.report [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Deleted allocations for instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a
[ 1807.373183] env[62730]: DEBUG oslo_concurrency.lockutils [None req-1b264aba-3cac-4222-9702-6a60455a67b2 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 665.272s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1807.374329] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 469.855s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1807.374731] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Acquiring lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1807.374813] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1807.375057] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1807.377531] env[62730]: INFO nova.compute.manager [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Terminating instance
[ 1807.379584] env[62730]: DEBUG nova.compute.manager [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1807.380235] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1807.380751] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73f3d93b-369b-45fc-a0fb-de9d638486ef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.387609] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1807.393365] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fb484a-7840-41ef-961b-147f94fedee2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.427788] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a could not be found.
[ 1807.427788] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1807.428443] env[62730]: INFO nova.compute.manager [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1807.428443] env[62730]: DEBUG oslo.service.loopingcall [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1807.430543] env[62730]: DEBUG nova.compute.manager [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1807.430668] env[62730]: DEBUG nova.network.neutron [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1807.447454] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1807.447969] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1807.449594] env[62730]: INFO nova.compute.claims [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1807.459798] env[62730]: DEBUG nova.network.neutron [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1807.477377] env[62730]: INFO nova.compute.manager [-] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] Took 0.05 seconds to deallocate network for instance.
[ 1807.584444] env[62730]: DEBUG oslo_concurrency.lockutils [None req-c2e92720-51e9-4ef6-880f-9bfe78a96e94 tempest-MultipleCreateTestJSON-667646996 tempest-MultipleCreateTestJSON-667646996-project-member] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.210s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1807.585306] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 92.478s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1807.585527] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 3e1c5c72-44f3-48dc-b649-b3e4fe141f0a] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1807.585723] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "3e1c5c72-44f3-48dc-b649-b3e4fe141f0a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1807.659234] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d26f19d-9950-42c8-982f-9f2f0e2f9faf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.667469] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0716779d-2dd3-4de4-8946-4a3a519f9a5e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.698657] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7284ce-8462-479a-aef5-1f8558b43cf4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.706582] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ac808c-878e-47ea-828b-e4adb78e381a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.720191] env[62730]: DEBUG nova.compute.provider_tree [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1807.728504] env[62730]: DEBUG nova.scheduler.client.report [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1807.741665] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.294s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1807.742174] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1807.777532] env[62730]: DEBUG nova.compute.utils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1807.779144] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Not allocating networking since 'none' was specified. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}}
[ 1807.797377] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1807.881016] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1807.906651] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=<?>,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-11-26T09:07:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1807.906975] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1807.907163] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1807.907356] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1807.907513] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1807.907701] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1807.907915] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1807.908094] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1807.908267] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1807.908449] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1807.908632] env[62730]: DEBUG nova.virt.hardware [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1807.909562] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f1e4f1-7c3c-42c5-b9d7-b2e04f4cee00 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.917920] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8bb878-cad5-4ebc-81ec-4d1c32bf1cf2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.931236] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance VIF info [] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1807.936678] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating folder: Project (3a2f02e8e5ce4988937c304a6e6858be). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1807.936925] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d74016c-c952-4a56-ba27-060d6f9b1c1c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.946017] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created folder: Project (3a2f02e8e5ce4988937c304a6e6858be) in parent group-v942928.
[ 1807.946202] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating folder: Instances. Parent ref: group-v943033. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1807.946416] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57447238-0990-47e9-bce6-7c8031f08bef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.953943] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created folder: Instances in parent group-v943033.
[ 1807.954149] env[62730]: DEBUG oslo.service.loopingcall [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1807.954331] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1807.954525] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f0a0477-9c2a-46a4-a2b1-765cbdfaa5fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1807.970337] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1807.970337] env[62730]: value = "task-4837262"
[ 1807.970337] env[62730]: _type = "Task"
[ 1807.970337] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1807.977506] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837262, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1808.480830] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837262, 'name': CreateVM_Task} progress is 99%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1808.618977] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1808.982148] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837262, 'name': CreateVM_Task} progress is 99%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1809.482729] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837262, 'name': CreateVM_Task, 'duration_secs': 1.256053} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1809.483153] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1809.483344] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1809.483504] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1809.483833] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1809.484116] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cba47ca-0779-4d1f-a427-e3c411765406 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1809.489227] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){
[ 1809.489227] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]522916bb-232a-ee05-b162-b26cca90a726"
[ 1809.489227] env[62730]: _type = "Task"
[ 1809.489227] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1809.497227] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]522916bb-232a-ee05-b162-b26cca90a726, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1810.000600] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1810.000870] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1810.001107] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1816.738206] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1818.733084] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1821.736616] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1821.737038] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1823.737157] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1823.750661] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1823.750912] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1823.751067] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1823.751233] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1823.752386] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2e0201-800d-40b0-b84e-412dde11368b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.761611] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6cef48-422b-4413-8bd2-4f20aa21f6e2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.775523] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d56328-aed0-42f9-b876-aabe43fc6427 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.781951] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb35706b-0e4d-431c-b20d-d558e80de0a1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.812572] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180537MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1823.812732] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1823.812909] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1823.897027] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897208] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897337] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897462] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897584] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897706] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897820] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.897941] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.898069] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.898207] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}}
[ 1823.911753] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1823.922033] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}}
[ 1823.922268] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1823.922430] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '11', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_39999c4fd29e4266ac76cfbe0c95df4d': '1', 'io_workload': '10', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1', 'num_task_spawning': '1', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1824.076560] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57cbf48-ed4b-4efb-9ddf-29552a3bbb25 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1824.084680] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a0e612-a1af-4307-ba72-bb220a722170 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1824.114131] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e48df71-9699-4c8e-9e62-9aa96ad5060d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1824.121511] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41285704-1554-4e15-b856-e27131de1504 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1824.135347] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1824.144032] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1824.161341] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1824.161520] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.349s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1825.162109] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1825.733274] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1825.755568] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1825.755763] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}}
[ 1825.755805] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}}
[ 1825.776617] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 
9c36edef-9792-4f26-88c0-94a07eb1f588] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.776800] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.776800] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.776912] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777039] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777166] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777288] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777409] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777529] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777648] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1825.777769] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1827.737865] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.737690] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.737893] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1854.906130] env[62730]: WARNING oslo_vmware.rw_handles [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1854.906130] env[62730]: ERROR oslo_vmware.rw_handles [ 1854.906130] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1854.908205] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1854.908445] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 
tempest-AttachInterfacesTestJSON-1720510119-project-member] Copying Virtual Disk [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/34ffeca4-fdf7-4ddc-a403-1ca7ea6412db/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1854.908792] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-357ed552-7ed7-46ba-9a7c-ed16a9e90fbc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.920522] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for the task: (returnval){ [ 1854.920522] env[62730]: value = "task-4837263" [ 1854.920522] env[62730]: _type = "Task" [ 1854.920522] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.928918] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Task: {'id': task-4837263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.431402] env[62730]: DEBUG oslo_vmware.exceptions [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1855.431656] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.432181] env[62730]: ERROR nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1855.432181] env[62730]: Faults: ['InvalidArgument'] [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Traceback (most recent call last): [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] yield resources [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self.driver.spawn(context, instance, image_meta, [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self._fetch_image_if_missing(context, vi) [ 1855.432181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] image_cache(vi, tmp_image_ds_loc) [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] vm_util.copy_virtual_disk( [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] session._wait_for_task(vmdk_copy_task) [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return self.wait_for_task(task_ref) [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return evt.wait() [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] result = hub.switch() [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1855.432882] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return self.greenlet.switch() [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self.f(*self.args, **self.kw) [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] raise exceptions.translate_fault(task_info.error) [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Faults: ['InvalidArgument'] [ 1855.433426] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] [ 1855.433426] env[62730]: INFO nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Terminating instance [ 1855.434465] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.434465] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1855.434621] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0af70f3c-7025-4e98-ae2c-d11ceb1e6746 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.436716] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1855.436909] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1855.437626] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec09151-739d-41bb-88a2-9235df7a43d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.445284] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1855.445537] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5c4b785-5302-474f-83c4-4a7007c54ac5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.447700] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1855.447893] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1855.448872] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fae012e1-7444-4d7f-9f39-9035d74725f2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.453448] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for the task: (returnval){ [ 1855.453448] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]521d531d-6a5b-153e-973f-a151c985f803" [ 1855.453448] env[62730]: _type = "Task" [ 1855.453448] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.467753] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1855.467998] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Creating directory with path [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1855.468242] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a75dc9b5-df4b-4ba0-8da0-a382ab713ee2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.488882] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Created directory with path [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1855.489107] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Fetch image to [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1855.489281] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1855.490154] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd7ac5c-0806-49a3-ac74-945126cd2921 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.497595] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73278766-dd82-4419-9a81-4c52b076a2e1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.506924] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069aa7a5-75c4-4ddb-8e8b-2d594446cedb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.540285] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-283675c9-8550-4e37-96da-5a0ffad3f27a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.542980] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1855.543185] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1855.543361] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Deleting the datastore file [datastore2] 9c36edef-9792-4f26-88c0-94a07eb1f588 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1855.543592] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1456f59d-187f-434e-ae84-87a2ed4f95a2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.549142] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0f07b57-3b7d-4694-9055-15da75b3490c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.551887] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for the task: (returnval){ [ 1855.551887] env[62730]: value = "task-4837265" [ 1855.551887] env[62730]: _type = "Task" [ 1855.551887] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.559604] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Task: {'id': task-4837265, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.571509] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1855.699546] env[62730]: DEBUG oslo_vmware.rw_handles [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1855.759290] env[62730]: DEBUG oslo_vmware.rw_handles [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1855.759776] env[62730]: DEBUG oslo_vmware.rw_handles [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1856.062831] env[62730]: DEBUG oslo_vmware.api [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Task: {'id': task-4837265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069398} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.063221] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1856.063302] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1856.063445] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1856.063618] env[62730]: INFO nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1856.065701] env[62730]: DEBUG nova.compute.claims [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1856.065881] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.066111] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.269099] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d121826a-743f-4067-9d4c-54db28799243 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.276674] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1924d173-3aa7-4849-97aa-6f675cbb0e8e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.307446] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65edf40b-7aba-464a-936c-d25718ad9d92 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.315453] env[62730]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b78a84d-a1b4-465f-ad8b-7028afd48a90 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.329141] env[62730]: DEBUG nova.compute.provider_tree [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1856.339710] env[62730]: DEBUG nova.scheduler.client.report [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1856.358203] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.292s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.358769] env[62730]: ERROR nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1856.358769] env[62730]: Faults: ['InvalidArgument'] [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Traceback (most recent call last): [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self.driver.spawn(context, instance, image_meta, [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self._fetch_image_if_missing(context, vi) [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] image_cache(vi, tmp_image_ds_loc) [ 1856.358769] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] vm_util.copy_virtual_disk( [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] session._wait_for_task(vmdk_copy_task) [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return self.wait_for_task(task_ref) [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return evt.wait() [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] result = hub.switch() [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] return self.greenlet.switch() [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1856.359181] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] self.f(*self.args, **self.kw) [ 1856.359529] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1856.359529] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] raise exceptions.translate_fault(task_info.error) [ 1856.359529] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1856.359529] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Faults: ['InvalidArgument'] [ 1856.359529] env[62730]: ERROR nova.compute.manager [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] [ 1856.359529] env[62730]: DEBUG nova.compute.utils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] VimFaultException {{(pid=62730) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1856.361150] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Build of instance 9c36edef-9792-4f26-88c0-94a07eb1f588 was re-scheduled: A specified parameter was not correct: fileType [ 1856.361150] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1856.361536] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1856.361844] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1856.361915] env[62730]: DEBUG nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1856.362060] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1856.708328] env[62730]: DEBUG nova.network.neutron [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.721391] env[62730]: INFO nova.compute.manager [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Took 0.36 seconds to deallocate network for instance. 
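The two tracebacks above both walk the same failure path: Nova waits on the vCenter CopyVirtualDisk task through oslo.vmware, whose polling loop re-reads the task state and, once the task reaches the 'error' state, raises a fault translated from the task's error object (here a VimFaultException with Faults: ['InvalidArgument'] on the fileType parameter). A minimal sketch of that polling pattern follows; it uses local placeholder names (get_task_info, the TaskInfo-like attributes, this VimFaultException class) rather than the real oslo.vmware API, so treat it as an illustration of the loop seen in the traceback, not as the library's implementation.

import time

class VimFaultException(Exception):
    # Placeholder mirroring the exception type named in the traceback.
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def poll_task(get_task_info, task_ref, interval=0.5):
    """Poll a vCenter-style task until it leaves the running states.

    get_task_info is an assumed callable returning an object with
    .state, .progress and .error attributes, loosely modelled on
    the vSphere TaskInfo structure.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Corresponds to: raise exceptions.translate_fault(task_info.error)
            raise VimFaultException(info.error.faults, info.error.message)
        # 'queued' or 'running': retry, as in the log line
        # "Task: {'id': task-4837263, ...} progress is 0%."
        time.sleep(interval)

Under this sketch, the "Waiting for the task: ... to complete" and "progress is 0%" lines above are the loop's periodic reads, and the VimFaultException traceback is the 'error' branch propagating up through session._wait_for_task into the compute manager, which then aborts the claim and re-schedules the build.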
[ 1856.824329] env[62730]: INFO nova.scheduler.client.report [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Deleted allocations for instance 9c36edef-9792-4f26-88c0-94a07eb1f588 [ 1856.847043] env[62730]: DEBUG oslo_concurrency.lockutils [None req-ce8f1c7e-d799-4503-a70d-98007715aeb1 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 672.813s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.848188] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 476.253s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.848439] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Acquiring lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.848687] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.848859] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.852336] env[62730]: INFO nova.compute.manager [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Terminating instance [ 1856.853734] env[62730]: DEBUG nova.compute.manager [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1856.853942] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1856.854499] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baae35e3-3037-4945-a5e0-5388d49c9677 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.865232] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f564c73-5fe0-448e-a3e6-09d1522c4b0a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.875976] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1856.899262] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c36edef-9792-4f26-88c0-94a07eb1f588 could not be found. [ 1856.899485] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1856.899670] env[62730]: INFO nova.compute.manager [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1856.899926] env[62730]: DEBUG oslo.service.loopingcall [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.900167] env[62730]: DEBUG nova.compute.manager [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1856.900266] env[62730]: DEBUG nova.network.neutron [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1856.929699] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.929974] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.931539] env[62730]: INFO nova.compute.claims [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1856.939026] env[62730]: DEBUG nova.network.neutron [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.945157] env[62730]: INFO nova.compute.manager [-] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] Took 0.04 seconds to deallocate network for instance. [ 1857.719420] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2283de11-c9ae-4760-bd77-98527b6c7828 tempest-AttachInterfacesTestJSON-1720510119 tempest-AttachInterfacesTestJSON-1720510119-project-member] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.870s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.719420] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 142.612s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.719420] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 9c36edef-9792-4f26-88c0-94a07eb1f588] During sync_power_state the instance has a pending task (deleting). Skip. 
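The lockutils lines throughout this log record two durations per lock: how long the caller waited before acquiring (":: waited 476.253s") and how long it held the lock before releasing (":: held 0.870s"). A stdlib-only sketch of that bookkeeping is below; log_fn, timed_lock and the name argument are illustrative stand-ins, not the oslo_concurrency API.

import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock: threading.Lock, name: str, log_fn=print):
    t0 = time.monotonic()
    lock.acquire()
    t_acquired = time.monotonic()
    # Waited time: acquire-request to acquire-success.
    log_fn('Lock "%s" acquired :: waited %.3fs' % (name, t_acquired - t0))
    try:
        yield
    finally:
        lock.release()
        # Held time: acquire-success to release.
        log_fn('Lock "%s" released :: held %.3fs'
               % (name, time.monotonic() - t_acquired))

Read this way, the long "waited" values above (e.g. 476.253s on the instance lock) measure contention with the build that held the lock, while the "held" values measure the critical section itself.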
[ 1857.719420] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "9c36edef-9792-4f26-88c0-94a07eb1f588" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.852187] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a710d0ab-2ee5-42a9-9780-a651f7bbb771 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.860495] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70c3083-3e3e-44b0-9510-99f67ce6db2b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.890140] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440c15e1-a606-4ab0-9b2b-dc60d2e4aa18 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.897613] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b875beb-194c-4ec7-a7cf-190d5809f23f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.911118] env[62730]: DEBUG nova.compute.provider_tree [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1857.920485] env[62730]: DEBUG nova.scheduler.client.report [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1857.936451] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.006s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.937038] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1857.970947] env[62730]: DEBUG nova.compute.utils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1857.972591] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Not allocating networking since 'none' was specified. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1857.983745] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1858.045469] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Start spawning the instance on the hypervisor. {{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1858.072652] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1858.072967] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1858.073117] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1858.073324] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1858.073468] env[62730]: DEBUG nova.virt.hardware [None 
req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1858.073654] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1858.073879] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1858.074087] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1858.074323] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1858.074538] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1858.074745] env[62730]: DEBUG nova.virt.hardware [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1858.075643] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8316e87a-c1e7-4cf4-ab0a-5c3ce2bb3435 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.084236] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec23b12-b2cd-4d5d-803c-528d05b9d082 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.097835] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance VIF info [] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1858.103550] env[62730]: DEBUG oslo.service.loopingcall [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.103713] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1858.103920] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52b66c2b-aa95-4943-9217-c49c07e02c37 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.121093] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1858.121093] env[62730]: value = "task-4837266" [ 1858.121093] env[62730]: _type = "Task" [ 1858.121093] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.128791] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837266, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.631427] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837266, 'name': CreateVM_Task, 'duration_secs': 0.275008} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.631619] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1858.631989] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.632169] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.632489] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1858.632742] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3515666-1975-4670-9f1f-377051ed97ac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.637606] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 1858.637606] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52cdaa0a-7cd7-3022-8197-f34554780455" [ 1858.637606] env[62730]: _type = "Task" [ 1858.637606] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.645579] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52cdaa0a-7cd7-3022-8197-f34554780455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.147601] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.147971] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1859.148030] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.737633] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1879.734419] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1881.737425] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.738663] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.739040] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.750254] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.750475] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.750644] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.750813] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1883.751941] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4868bf-651d-43ef-982e-5bf11eacf371 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.761566] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96131338-9252-4ba7-b5f4-235fae4b431a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.775872] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4090950-951b-4a1c-952e-dc3f00faa33f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.782931] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f394086d-9393-4599-bd66-30250b387779 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.816545] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180546MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1883.816927] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.816927] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.907492] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.907691] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.907827] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.907972] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908113] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908256] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908387] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908532] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908669] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.908798] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1883.926051] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1883.926051] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1883.926051] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '12', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_642da990c34d4a64be9ab53e87990e8a': '1', 'io_workload': '10', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1', 'num_task_spawning': '2', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1884.071045] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7538b0f9-f175-427b-b1cd-efefbdf1ce7c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.077847] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9ce0d9-7457-4b15-8cc4-cb3e44287558 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.108094] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac294141-34cd-42ec-8d2d-b65591536d34 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.115713] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6d8576-6acf-4d7d-a9de-f168b89291ba {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.129589] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in 
ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.138210] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1884.153296] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1884.153517] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.337s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.153271] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1886.737550] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1886.737910] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1886.737910] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1886.759392] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.759568] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.759694] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.759824] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.759948] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760085] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760210] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760330] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760448] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760563] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1886.760684] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1889.738403] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.738762] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.738962] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1902.061035] env[62730]: WARNING oslo_vmware.rw_handles [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1902.061035] env[62730]: ERROR oslo_vmware.rw_handles [ 1902.061035] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1902.063260] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1902.063555] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Copying Virtual Disk [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/c84f2e78-2d34-4741-b2b6-f7a4788f8e5f/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1902.063850] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6ecf2bc-d56b-4caf-a079-f8e8dcc2000b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.072595] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 
tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for the task: (returnval){ [ 1902.072595] env[62730]: value = "task-4837267" [ 1902.072595] env[62730]: _type = "Task" [ 1902.072595] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.081279] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Task: {'id': task-4837267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.583827] env[62730]: DEBUG oslo_vmware.exceptions [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1902.584115] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.584723] env[62730]: ERROR nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1902.584723] env[62730]: Faults: ['InvalidArgument'] [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Traceback (most recent call last): [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] yield resources [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self.driver.spawn(context, instance, image_meta, [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self._fetch_image_if_missing(context, vi) [ 1902.584723] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] image_cache(vi, tmp_image_ds_loc) [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] vm_util.copy_virtual_disk( [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] session._wait_for_task(vmdk_copy_task) [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return self.wait_for_task(task_ref) [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return evt.wait() [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] result = hub.switch() [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1902.585365] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return self.greenlet.switch() [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self.f(*self.args, **self.kw) [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] raise exceptions.translate_fault(task_info.error) [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Faults: ['InvalidArgument'] [ 1902.585768] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] [ 1902.585768] env[62730]: INFO nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: 
ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Terminating instance [ 1902.586973] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.587198] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1902.587479] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f67df2a6-3f15-4545-9089-ddc593eecf04 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.589863] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1902.590100] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1902.590912] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022199a7-35d5-44fe-bbd6-7c8aabda6906 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.598671] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1902.598889] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31de97af-5661-4611-8d25-e5fc0db24a06 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.601366] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1902.601544] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1902.602636] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ab66bc0-1cf6-461e-b104-6cbd2db42d24 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.608110] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1902.608110] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]524a7292-0398-934b-7ec4-007c9d63ce1e" [ 1902.608110] env[62730]: _type = "Task" [ 1902.608110] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.623877] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1902.624177] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating directory with path [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1902.624443] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48873cd5-553c-4ee4-8342-18832f435713 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.637753] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Created directory with path [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1902.637992] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Fetch image to [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1902.638225] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1902.639149] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-783755ec-53f2-4ece-b2a8-2ce009ca0378 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.648446] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f62d2a-7a24-4113-a8a1-4ad1a7a3b94b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.661435] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6e0a50-abe5-4272-9a51-8f0a4e402403 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.699056] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1ae489-afeb-4a33-bb72-a1ffb35f3f81 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.701844] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1902.702082] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1902.702266] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Deleting the datastore file [datastore2] ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1902.702515] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e5c7c93-fd3b-4c7f-9e6d-c02bedab5303 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.710438] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-76d6ed45-2185-4205-aeeb-fbef3aac187f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.712648] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for the task: (returnval){ [ 1902.712648] env[62730]: value = "task-4837269" [ 1902.712648] env[62730]: _type = "Task" [ 1902.712648] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.723817] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Task: {'id': task-4837269, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.742408] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1902.880330] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1902.943933] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1902.944196] env[62730]: DEBUG oslo_vmware.rw_handles [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1903.223608] env[62730]: DEBUG oslo_vmware.api [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Task: {'id': task-4837269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091584} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.224142] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1903.224142] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1903.224142] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1903.224451] env[62730]: INFO nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1903.226768] env[62730]: DEBUG nova.compute.claims [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1903.226955] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.227178] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.433940] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d5787d-2105-438d-95a2-7977310a6cba {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.442309] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346aa495-f50e-4d1b-922a-d839093403d4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.472370] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774779e4-3a88-4578-8aa9-b22ec376dab4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.480461] 
env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf4f117-c83c-40d7-af13-c7ac3fd75cbb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.495974] env[62730]: DEBUG nova.compute.provider_tree [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1903.505561] env[62730]: DEBUG nova.scheduler.client.report [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1903.523372] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.523943] env[62730]: ERROR nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.523943] env[62730]: Faults: ['InvalidArgument'] [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Traceback (most recent call last): [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self.driver.spawn(context, instance, image_meta, [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self._fetch_image_if_missing(context, vi) [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] image_cache(vi, tmp_image_ds_loc) [ 1903.523943] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] vm_util.copy_virtual_disk( [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] session._wait_for_task(vmdk_copy_task) [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return self.wait_for_task(task_ref) [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return evt.wait() [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] result = hub.switch() [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] return self.greenlet.switch() [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1903.524467] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] self.f(*self.args, **self.kw) [ 1903.524835] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1903.524835] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] raise exceptions.translate_fault(task_info.error) [ 1903.524835] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1903.524835] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Faults: ['InvalidArgument'] [ 1903.524835] env[62730]: ERROR nova.compute.manager [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] [ 1903.524835] env[62730]: DEBUG nova.compute.utils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: 
ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1903.526427] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Build of instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 was re-scheduled: A specified parameter was not correct: fileType [ 1903.526427] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1903.526821] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1903.527009] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1903.527193] env[62730]: DEBUG nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1903.527491] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1903.928660] env[62730]: DEBUG nova.network.neutron [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.945216] env[62730]: INFO nova.compute.manager [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Took 0.42 seconds to deallocate network for instance. 
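
The traceback above shows the task-polling pattern that produced the failure: wait_for_task blocks on an eventlet event while a looping call polls the vCenter task, and when the task errors out the fault ('A specified parameter was not correct: fileType', faults ['InvalidArgument']) is translated into a VimFaultException and re-raised through the event. The following is a minimal illustrative sketch of that poll-and-translate shape only, not oslo.vmware's actual code; every name in it is hypothetical.

    # Illustrative sketch of the poll-and-translate pattern visible in the
    # traceback above; NOT oslo.vmware's implementation. Names are hypothetical.
    import time


    class VimFaultError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']


    def wait_for_task(poll_fn, interval=0.5):
        """Poll a task until it finishes; raise a typed fault on error.

        poll_fn() returns a dict like {'state': ..., 'error': ..., 'result': ...},
        mimicking the TaskInfo that the log's _poll_task reads each iteration.
        """
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors exceptions.translate_fault(task_info.error) in the
                # traceback: the SOAP fault becomes a Python exception.
                raise VimFaultError(info['error']['message'],
                                    info['error'].get('faults', []))
            time.sleep(interval)  # task still queued or running

In the log, exactly this exception propagates out of the spawn path; the compute manager then aborts the resource claim, marks the build for re-scheduling, and deallocates the instance's networking, which is what the surrounding records show.
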
[ 1904.063260] env[62730]: INFO nova.scheduler.client.report [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Deleted allocations for instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 [ 1904.088430] env[62730]: DEBUG oslo_concurrency.lockutils [None req-584be1b9-eba6-461b-8e52-146dcd4b3904 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 654.795s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.089758] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 459.422s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.089988] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Acquiring lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.090216] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.090392] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.093229] env[62730]: INFO nova.compute.manager [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Terminating instance [ 1904.095128] env[62730]: DEBUG nova.compute.manager [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1904.095335] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1904.096069] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f104554-ba0b-4834-9e43-95aa8700a5c2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.107304] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4890b4c4-e026-4c67-946f-28120bcfa1b7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.123867] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Starting instance... {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1904.148952] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ca80cf5a-da64-4e2a-ae70-c86ba1c3a491 could not be found. [ 1904.149190] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1904.149398] env[62730]: INFO nova.compute.manager [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1904.149678] env[62730]: DEBUG oslo.service.loopingcall [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.149932] env[62730]: DEBUG nova.compute.manager [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1904.150037] env[62730]: DEBUG nova.network.neutron [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1904.184462] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.184721] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.186190] env[62730]: INFO nova.compute.claims [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.190308] env[62730]: DEBUG nova.network.neutron [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.201232] env[62730]: INFO nova.compute.manager [-] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] Took 0.05 seconds to deallocate network for instance. [ 1904.299704] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2263a592-c3d8-4231-91fb-e40caecbf669 tempest-InstanceActionsV221TestJSON-1241381242 tempest-InstanceActionsV221TestJSON-1241381242-project-member] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.210s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.300625] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 189.193s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.300814] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: ca80cf5a-da64-4e2a-ae70-c86ba1c3a491] During sync_power_state the instance has a pending task (deleting). Skip. 
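
The "waited 459.422s" / "held 0.210s" figures in the lockutils records come from the lock wrapper timing how long a caller blocked before acquiring a named lock and how long it then held it; a long wait simply means another request held the same instance lock for that long (here, the build held the lock while the terminate request queued behind it). A minimal sketch of such a timing wrapper, assuming plain threading; this is hypothetical, not oslo.concurrency's code.

    # Hypothetical sketch of how "waited X.XXXs" / "held Y.YYYs" lock logging
    # can be produced; NOT oslo.concurrency's implementation.
    import contextlib
    import threading
    import time

    _locks = {}  # one shared lock object per lock name


    @contextlib.contextmanager
    def timed_lock(name, owner):
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()  # blocks while another owner holds the named lock
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


    # Usage, mirroring the per-instance serialization seen in the log:
    with timed_lock("ca80cf5a-da64-4e2a-ae70-c86ba1c3a491", "do_terminate_instance"):
        pass  # destroy the instance while holding its lock
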
[ 1904.301121] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "ca80cf5a-da64-4e2a-ae70-c86ba1c3a491" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.398338] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caf201c-b188-4639-9819-647427af56fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.406553] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1747f737-65a6-4ed3-a766-072a7470d932 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.437505] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa17a88-d262-4b50-9e2b-940f9f115437 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.446187] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc52cfa8-3ee3-44ce-8e43-8fd879b35c6e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.460289] env[62730]: DEBUG nova.compute.provider_tree [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.469850] env[62730]: DEBUG nova.scheduler.client.report [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1904.485156] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.485650] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Start building networks asynchronously for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1904.520516] env[62730]: DEBUG nova.compute.utils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.522105] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1904.522284] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1904.533466] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1904.586312] env[62730]: DEBUG nova.policy [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53339aecfbb349e09be6d90418d2a5ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '292f9661bffa4d2a98d4d8df60a44534', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1904.625228] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1904.653163] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1904.653433] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1904.653593] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1904.653775] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1904.653923] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1904.654085] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1904.654304] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1904.654466] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1904.654636] env[62730]: DEBUG nova.virt.hardware [None 
req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1904.654801] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1904.654976] env[62730]: DEBUG nova.virt.hardware [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1904.655845] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd5dc0a-c6ad-4226-8309-c736a254abb9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.664927] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4228eaa-098b-4246-a960-61ccf2aaaec6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.029826] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Successfully created port: 70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1905.990877] env[62730]: DEBUG nova.compute.manager [req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Received event network-vif-plugged-70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1905.991166] env[62730]: DEBUG oslo_concurrency.lockutils [req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] Acquiring lock "5b182a44-2add-42f6-913d-14c5379e76be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.991344] env[62730]: DEBUG oslo_concurrency.lockutils [req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] Lock "5b182a44-2add-42f6-913d-14c5379e76be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.991517] env[62730]: DEBUG oslo_concurrency.lockutils [req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] Lock "5b182a44-2add-42f6-913d-14c5379e76be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.991685] env[62730]: DEBUG nova.compute.manager 
[req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] No waiting events found dispatching network-vif-plugged-70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1905.991854] env[62730]: WARNING nova.compute.manager [req-a11576ab-405d-43c0-8212-9d2bf0d4a1d1 req-8b4c2d34-6851-4b48-9768-e66c370ec4c3 service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Received unexpected event network-vif-plugged-70eabe62-592c-471b-8112-6ac3e8376b8e for instance with vm_state building and task_state spawning. [ 1906.089753] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Successfully updated port: 70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1906.108478] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.108478] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquired lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.108678] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1906.148586] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1906.449600] env[62730]: DEBUG nova.network.neutron [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Updating instance_info_cache with network_info: [{"id": "70eabe62-592c-471b-8112-6ac3e8376b8e", "address": "fa:16:3e:b9:59:0f", "network": {"id": "3b975718-a5e2-4039-b3df-d3de7c4cbebf", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-368336415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "292f9661bffa4d2a98d4d8df60a44534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70eabe62-59", "ovs_interfaceid": "70eabe62-592c-471b-8112-6ac3e8376b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.465443] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Releasing lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.465760] env[62730]: DEBUG nova.compute.manager [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Instance network_info: |[{"id": "70eabe62-592c-471b-8112-6ac3e8376b8e", "address": "fa:16:3e:b9:59:0f", "network": {"id": "3b975718-a5e2-4039-b3df-d3de7c4cbebf", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-368336415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "292f9661bffa4d2a98d4d8df60a44534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70eabe62-59", "ovs_interfaceid": "70eabe62-592c-471b-8112-6ac3e8376b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1906.466306] env[62730]: DEBUG 
nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:59:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70eabe62-592c-471b-8112-6ac3e8376b8e', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1906.474313] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Creating folder: Project (292f9661bffa4d2a98d4d8df60a44534). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1906.474904] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4173772-e8f5-45e0-9c61-ec23f92ba533 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.487176] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Created folder: Project (292f9661bffa4d2a98d4d8df60a44534) in parent group-v942928. [ 1906.487348] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Creating folder: Instances. Parent ref: group-v943037. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1906.487547] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89857689-99ec-4b6f-81ea-0dd8d44624a4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.498030] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Created folder: Instances in parent group-v943037. [ 1906.498315] env[62730]: DEBUG oslo.service.loopingcall [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1906.498543] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1906.498770] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f964c8a-2428-4d18-9ae3-661a761c4c54 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.520660] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1906.520660] env[62730]: value = "task-4837272" [ 1906.520660] env[62730]: _type = "Task" [ 1906.520660] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.529212] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837272, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.030539] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837272, 'name': CreateVM_Task, 'duration_secs': 0.30272} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.030925] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1907.037911] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.038104] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.038450] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1907.038724] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73ed291b-b9df-47fb-979b-e5df98cd3797 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.043662] env[62730]: DEBUG oslo_vmware.api [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Waiting for the task: (returnval){ [ 1907.043662] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]521add9d-4b8d-b46b-69a9-895dbce1062d" [ 1907.043662] env[62730]: _type = "Task" [ 1907.043662] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.051827] env[62730]: DEBUG oslo_vmware.api [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]521add9d-4b8d-b46b-69a9-895dbce1062d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.553836] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.554105] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1907.554321] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.020863] env[62730]: DEBUG nova.compute.manager [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Received event network-changed-70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1908.021180] env[62730]: DEBUG nova.compute.manager [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Refreshing instance network info cache due to event network-changed-70eabe62-592c-471b-8112-6ac3e8376b8e. {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1908.021329] env[62730]: DEBUG oslo_concurrency.lockutils [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] Acquiring lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.021505] env[62730]: DEBUG oslo_concurrency.lockutils [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] Acquired lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.021683] env[62730]: DEBUG nova.network.neutron [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Refreshing network info cache for port 70eabe62-592c-471b-8112-6ac3e8376b8e {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1908.289462] env[62730]: DEBUG nova.network.neutron [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Updated VIF entry in instance network info cache for port 70eabe62-592c-471b-8112-6ac3e8376b8e. 
{{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1908.289872] env[62730]: DEBUG nova.network.neutron [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Updating instance_info_cache with network_info: [{"id": "70eabe62-592c-471b-8112-6ac3e8376b8e", "address": "fa:16:3e:b9:59:0f", "network": {"id": "3b975718-a5e2-4039-b3df-d3de7c4cbebf", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-368336415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "292f9661bffa4d2a98d4d8df60a44534", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70eabe62-59", "ovs_interfaceid": "70eabe62-592c-471b-8112-6ac3e8376b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.301063] env[62730]: DEBUG oslo_concurrency.lockutils [req-da5c78a3-aac3-43c7-b5b6-de3187ad0ec1 req-98e051f5-049c-4f57-850d-4c901267705a service nova] Releasing lock "refresh_cache-5b182a44-2add-42f6-913d-14c5379e76be" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.900748] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "344fc477-d506-43bf-9fc7-e03889a43202" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.737679] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.733515] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.737835] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.738037] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.738037] 
env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.738037] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.750847] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.751071] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.751230] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.751388] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1945.752545] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7491ece5-8adc-4c41-88a3-19b498f2aa66 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.761509] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f885e00d-864f-4fef-904a-5f13dcb1c6df {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.775979] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d736a74e-0ee3-4922-b497-12f68c790c74 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.782859] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d16df5f-8c45-4fa0-9590-0eafa7bafd8d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.813177] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180513MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1945.813347] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.813546] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.896023] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896023] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896023] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896023] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896270] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896270] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896270] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896389] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896488] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896610] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1945.896802] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1945.896954] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '13', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_1ca2739fcb8b4c7db333ac9aa362ca50': '1', 'io_workload': '10', 'num_proj_47edc70d81cc4ea68d8da7bec4c625d0': '1', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1', 'num_task_spawning': '2', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1946.041236] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5b8065-7e85-44a2-a212-7d47a4170029 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.049658] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f08d3c-cfd8-4c54-ae23-279bbf9cb8aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.078679] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c950e66-7148-4c19-b89a-e188eace6203 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.085964] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-233609dd-7702-44c6-93ae-aca643313f5b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.100229] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1946.111555] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1946.127987] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1946.128230] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.315s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.123863] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1947.147037] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1947.147230] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1947.147279] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1947.169742] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.169914] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170092] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170231] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170367] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170493] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170614] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170732] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170852] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.170969] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1947.171103] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
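Every candidate in the heal pass above is skipped for the same reason, its vm_state is still Building, so the pass ends with nothing to refresh. The filtering reduces to a sketch like this (a hypothetical standalone rendition; the real loop is in nova/compute/manager.py at the _heal_instance_info_cache lines cited in the log):

    BUILDING = 'building'

    def instances_to_heal(instances):
        """Yield instances whose network info cache should be refreshed."""
        for inst in instances:
            if inst['vm_state'] == BUILDING:
                # "Skipping network cache update for instance because
                # it is Building."
                continue
            yield inst

    pending = [{'uuid': 'fake-uuid-1', 'vm_state': BUILDING},
               {'uuid': 'fake-uuid-2', 'vm_state': BUILDING}]
    if not list(instances_to_heal(pending)):
        print("Didn't find any instances for network info cache update.")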
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1948.436065] env[62730]: WARNING oslo_vmware.rw_handles [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1948.436065] env[62730]: ERROR oslo_vmware.rw_handles [ 1948.436065] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1948.438104] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1948.438433] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Copying Virtual Disk [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/9e5477a6-38ae-4bfc-b7c7-c6a98e9bcf79/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1948.438747] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22369a85-a17c-4fbf-b5ea-ab7edf65a3e0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.446945] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 
tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1948.446945] env[62730]: value = "task-4837273" [ 1948.446945] env[62730]: _type = "Task" [ 1948.446945] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.455731] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.958191] env[62730]: DEBUG oslo_vmware.exceptions [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1948.958687] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.959251] env[62730]: ERROR nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1948.959251] env[62730]: Faults: ['InvalidArgument'] [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Traceback (most recent call last): [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] yield resources [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self.driver.spawn(context, instance, image_meta, [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self._fetch_image_if_missing(context, vi) [ 1948.959251] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", 
line 639, in _fetch_image_if_missing [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] image_cache(vi, tmp_image_ds_loc) [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] vm_util.copy_virtual_disk( [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] session._wait_for_task(vmdk_copy_task) [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return self.wait_for_task(task_ref) [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return evt.wait() [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] result = hub.switch() [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1948.959841] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return self.greenlet.switch() [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self.f(*self.args, **self.kw) [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] raise exceptions.translate_fault(task_info.error) [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Faults: ['InvalidArgument'] [ 1948.960764] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] [ 1948.960764] env[62730]: INFO nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Terminating instance [ 
1948.961357] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.961357] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1948.961357] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44f68f69-8468-4ad2-9f47-d114bc918acf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.963865] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1948.964770] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1948.964828] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee25e05b-d0bc-4dcf-8ed0-8b55feda391c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.972193] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1948.972446] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b951fd5-9314-476a-b111-6fe73072c8ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.974839] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1948.975028] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Folder [datastore2] devstack-image-cache_base created. 
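Two request contexts (the AttachVolumeShelve teardown and the ImagesTestJSON image fetch) are operating on the image cache at once, which is safe because datastore directory creation is treated as idempotent: a "file already exists" fault from MakeDirectory is swallowed. A minimal sketch of that convention, assuming an oslo.vmware session object (the helper name is made up; the real logic is split across ds_util.mkdir and _create_folder_if_missing):

    from oslo_vmware import exceptions as vexc

    def create_folder_if_missing(session, ds_path, dc_ref):
        """Create a datastore folder, tolerating a concurrent creator."""
        try:
            session.invoke_api(session.vim, 'MakeDirectory',
                               session.vim.service_content.fileManager,
                               name=str(ds_path), datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            # Another request won the race; the folder exists, which is
            # all either caller needs.
            pass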
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1948.976049] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d1db309-dd4e-48a0-823c-c31efb56602b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.981140] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1948.981140] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526e3a44-9877-1bb7-ac5c-0e4227eed94c" [ 1948.981140] env[62730]: _type = "Task" [ 1948.981140] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.988729] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]526e3a44-9877-1bb7-ac5c-0e4227eed94c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.046722] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1949.047034] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1949.047247] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleting the datastore file [datastore2] 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.047527] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c19f1d5-8b74-42e2-b939-18d4c19e308d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.054176] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for the task: (returnval){ [ 1949.054176] env[62730]: value = "task-4837275" [ 1949.054176] env[62730]: _type = "Task" [ 1949.054176] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.062517] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.491418] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1949.491802] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating directory with path [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.491905] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8934edcc-87fa-45f0-8ebc-6c0a73f5addd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.503406] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Created directory with path [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.503621] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Fetch image to [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1949.503805] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1949.504593] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9afd7b0-ab0d-4bb1-9ddd-fdd204ca0dbe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.511558] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50552e1e-a504-4902-8d3d-0bd3ce350c5d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.520871] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02fcffb-7f16-4cd4-aa16-393d3a320c99 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.554085] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392f4df1-7b17-4845-b4c4-bb6b15f68223 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.566786] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-240c9adb-96c8-4b08-9418-65eef7f194d0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.568795] env[62730]: DEBUG oslo_vmware.api [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Task: {'id': task-4837275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078885} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.569069] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.569270] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1949.569439] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1949.569624] env[62730]: INFO nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Took 0.61 seconds to destroy the instance on the hypervisor. 
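All of the task waits in this stretch (CopyVirtualDisk_Task, SearchDatastore_Task, DeleteDatastoreFile_Task) go through the same polling loop: read the task info, log progress, return on success with a duration, or translate the vCenter fault into a Python exception, which is precisely how the earlier fileType/InvalidArgument failure surfaced. A simplified standalone model of that loop (poll_info stands in for the real property-collector read; the exception class is a stand-in too):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(poll_info, interval=0.5):
        """Poll a vCenter task until it leaves the running states."""
        while True:
            info = poll_info()
            if info['state'] in ('queued', 'running'):
                time.sleep(interval)     # "... progress is 0%."
                continue
            if info['state'] == 'success':
                return info              # logged with duration_secs
            # state == 'error': mirrors raise exceptions.translate_fault(...)
            raise VimFaultException(info['error'])

    # Hypothetical task that succeeds on the second poll:
    states = iter([{'state': 'running'},
                   {'state': 'success', 'duration_secs': 0.078885}])
    print(wait_for_task(lambda: next(states), interval=0))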
[ 1949.571953] env[62730]: DEBUG nova.compute.claims [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1949.572173] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.572411] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.593845] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1949.737641] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.742717] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1949.802648] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1949.802951] env[62730]: DEBUG oslo_vmware.rw_handles [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
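The image download above is, at the transport level, an HTTPS PUT against the ESX host's /folder endpoint: rw_handles opens a write connection sized to the image (21318656 bytes here), streams the Glance data into it, and only learns about a hung-up peer when it closes the handle and reads the response, which is exactly the RemoteDisconnected warning at 1948.436065. A bare http.client sketch of the same shape (host, path and ticket are placeholders; real code uses oslo_vmware.rw_handles and a SessionManager.AcquireGenericServiceTicket ticket, as logged above):

    import http.client

    def put_to_datastore(host, path, data, ticket):
        """Stream image bytes to an ESX /folder URL, rw_handles-style."""
        conn = http.client.HTTPSConnection(host, 443)
        conn.putrequest('PUT', path)
        conn.putheader('Content-Length', str(len(data)))
        # Cookie name assumed from the ticket-auth convention:
        conn.putheader('Cookie', 'vmware_cgi_ticket=%s' % ticket)
        conn.endheaders()
        conn.send(data)
        # Closing/reading here raises http.client.RemoteDisconnected if
        # the server drops the connection without a response.
        return conn.getresponse().status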
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1949.827074] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554a5a9d-cc81-4585-9c8c-0d71d1d1c702 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.835381] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9988273a-6ef6-47ee-b210-e01181059d25 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.867081] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d48531-ac3c-4843-a5ab-eb39b0c0d768 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.875632] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd80002a-b40f-4b5e-a6df-0ac95e2c7836 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.890080] env[62730]: DEBUG nova.compute.provider_tree [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1949.900887] env[62730]: DEBUG nova.scheduler.client.report [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1949.914976] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.342s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.915553] env[62730]: ERROR nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1949.915553] env[62730]: Faults: ['InvalidArgument'] [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Traceback (most recent call last): [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance 
[ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self.driver.spawn(context, instance, image_meta, [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self._fetch_image_if_missing(context, vi) [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] image_cache(vi, tmp_image_ds_loc) [ 1949.915553] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] vm_util.copy_virtual_disk( [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] session._wait_for_task(vmdk_copy_task) [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return self.wait_for_task(task_ref) [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return evt.wait() [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] result = hub.switch() [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] return self.greenlet.switch() [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1949.915945] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] self.f(*self.args, **self.kw) [ 1949.916621] env[62730]: ERROR nova.compute.manager [instance: 
6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1949.916621] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] raise exceptions.translate_fault(task_info.error) [ 1949.916621] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1949.916621] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Faults: ['InvalidArgument'] [ 1949.916621] env[62730]: ERROR nova.compute.manager [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] [ 1949.916621] env[62730]: DEBUG nova.compute.utils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1949.918334] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Build of instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 was re-scheduled: A specified parameter was not correct: fileType [ 1949.918334] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1949.918712] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1949.918890] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1949.919134] env[62730]: DEBUG nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1949.919373] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1950.297931] env[62730]: DEBUG nova.network.neutron [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.315305] env[62730]: INFO nova.compute.manager [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Took 0.39 seconds to deallocate network for instance. [ 1950.431915] env[62730]: INFO nova.scheduler.client.report [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Deleted allocations for instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 [ 1950.455539] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8b05ac49-569a-4f13-928f-03ff54ed8c3d tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.607s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.455734] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 389.668s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.456942] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Acquiring lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.456942] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.456942] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.458793] env[62730]: INFO nova.compute.manager [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Terminating instance [ 1950.461036] env[62730]: DEBUG nova.compute.manager [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1950.461167] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1950.461709] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2067566c-bea1-4bad-b7b9-9243006dcc93 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.473953] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5842197a-5e8b-41b3-aa58-04b9bc802ff5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.504979] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10 could not be found. [ 1950.505334] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1950.505509] env[62730]: INFO nova.compute.manager [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1950.505790] env[62730]: DEBUG oslo.service.loopingcall [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.506008] env[62730]: DEBUG nova.compute.manager [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1950.506131] env[62730]: DEBUG nova.network.neutron [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1950.534931] env[62730]: DEBUG nova.network.neutron [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.544619] env[62730]: INFO nova.compute.manager [-] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] Took 0.04 seconds to deallocate network for instance. [ 1950.674200] env[62730]: DEBUG oslo_concurrency.lockutils [None req-cbbd1f52-06af-48be-ba25-90b5c5b373a3 tempest-AttachVolumeShelveTestJSON-1995286572 tempest-AttachVolumeShelveTestJSON-1995286572-project-member] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.218s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.675141] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 235.568s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.675375] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 6dff3e96-31d0-4964-8a5e-f15ab8fdbb10] During sync_power_state the instance has a pending task (deleting). Skip. [ 1950.675597] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "6dff3e96-31d0-4964-8a5e-f15ab8fdbb10" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.737540] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1950.737762] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
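_reclaim_queued_deletes is a periodic task that gates itself on configuration: with reclaim_instance_interval at its default of 0, soft-deleted instances are never reclaimed and the task logs the skip above on every cycle. The guard amounts to the following (a sketch using oslo.service's decorator; the option default matches stock Nova, the class is illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                # "CONF.reclaim_instance_interval <= 0, skipping..."
                return
            # ...reclaim instances whose soft-delete grace period expired...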
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1977.666016] env[62730]: DEBUG oslo_concurrency.lockutils [None req-edcf93bc-2809-4614-9750-94afc4e641fa tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquiring lock "5b182a44-2add-42f6-913d-14c5379e76be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.539469] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.539884] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.553301] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Starting instance... 
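Both entries above take locks named after an instance UUID: terminate and build serialize per instance, while work on different instances proceeds in parallel (the build lock is acquired after waiting 0.000s because nobody else holds that UUID). The pattern is lockutils with a dynamic lock name, roughly as follows (hypothetical wrapper; Nova does this inside build_and_run_instance via a _locked_do_build_and_run_instance inner function):

    from oslo_concurrency import lockutils

    def build_and_run_instance(instance_uuid, do_build):
        # Lock name == instance UUID, so duplicate requests for the same
        # instance queue up while other instances build concurrently.
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            return do_build()
        return _locked_do_build_and_run_instance()

    print(build_and_run_instance('8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b',
                                 lambda: 'built'))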
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1981.605017] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.605311] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.606831] env[62730]: INFO nova.compute.claims [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1981.833269] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33638a4-6bf5-4dc9-85ca-ad5c2fe64934 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.842986] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75336833-9f27-4c85-bd34-f1b6663fa5af {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.877271] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0926ea-53ab-4cf3-af71-c82371481a64 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.885877] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89918928-02a5-4eb1-9466-fac1078a120e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.900243] env[62730]: DEBUG nova.compute.provider_tree [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.909912] env[62730]: DEBUG nova.scheduler.client.report [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
1981.924707] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.925232] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1981.968028] env[62730]: DEBUG nova.compute.utils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1981.969089] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1981.969282] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1981.978624] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Start building block device mappings for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1982.053100] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Start spawning the instance on the hypervisor. 
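The entries above trace Nova's claim-then-build sequence: the "compute_resources" lock is held only long enough for ResourceTracker.instance_claim (0.319s here), after which network allocation proceeds asynchronously outside the critical section. A minimal sketch of that lock pattern, assuming oslo.concurrency is installed; claim_resources and build_instance are illustrative names, not Nova's API:

```python
# Minimal sketch of the lock pattern traced above, assuming oslo.concurrency
# is installed; claim_resources() is a stand-in, not ResourceTracker's API.
from oslo_concurrency import lockutils

def claim_resources(instance_uuid):
    # Placeholder for ResourceTracker.instance_claim(); in the log this runs
    # while the "compute_resources" lock is held.
    print(f"claiming resources for {instance_uuid}")

def build_instance(instance_uuid):
    # lockutils.lock() is the primitive behind the "Acquiring lock ..." /
    # "Lock ... acquired ... waited Ns" / "released ... held Ns" lines.
    with lockutils.lock("compute_resources"):
        claim_resources(instance_uuid)
    # Network allocation starts after the lock is released, matching
    # "Start building networks asynchronously for instance."

build_instance("8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b")
```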
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1982.059278] env[62730]: DEBUG nova.policy [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c194b121bb64527aa9c2a097117c4ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f54137e1151d46fe9ba541e5e2bce843', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 1982.088513] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1982.088867] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1982.089101] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1982.089355] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1982.089563] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1982.089771] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1982.090048] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1982.090280] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1982.090511] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1982.090736] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1982.090986] env[62730]: DEBUG nova.virt.hardware [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1982.092255] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f539e6-44a6-4081-aef4-70a1c7e8c364 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.101611] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf759377-511c-4b5c-9627-c0576b7ac53a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.515073] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Successfully created port: 7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1983.114589] env[62730]: DEBUG nova.compute.manager [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Received event network-vif-plugged-7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1983.114827] env[62730]: DEBUG oslo_concurrency.lockutils [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] Acquiring lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.115016] env[62730]: DEBUG oslo_concurrency.lockutils [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] Lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.115204] env[62730]: DEBUG oslo_concurrency.lockutils [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] Lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.115378] env[62730]: DEBUG nova.compute.manager [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] No waiting events found dispatching network-vif-plugged-7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1983.115544] env[62730]: WARNING nova.compute.manager [req-f00ba9a0-2a79-4ec5-9222-7925a6a7d393 req-074148ab-f4db-4f0e-a037-06054563af74 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Received unexpected event network-vif-plugged-7634a446-da34-40eb-aee8-15902108c2c1 for instance with vm_state building and task_state spawning. [ 1983.221329] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Successfully updated port: 7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1983.238775] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.238957] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquired lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.239218] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1983.314113] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Instance cache missing network info. 
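The WARNING above is the expected race between Neutron's network-vif-plugged notification and the compute worker registering a waiter for it: pop_instance_event finds no waiting event and logs the arrival as unexpected, which is harmless while the instance is still building. A simplified, stdlib-only model of that pop-or-warn dispatch (not Nova's actual InstanceEvents class):

```python
# Simplified model (not Nova's InstanceEvents) of the pop-or-warn dispatch
# behind the WARNING above: events arriving before a waiter is registered
# are logged and dropped.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:     # mirrors the "<uuid>-events" lock lines above
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()         # wake whichever thread is blocked in wait()

events = InstanceEvents()
events.pop_instance_event("8a2af5ae", "network-vif-plugged")  # -> WARNING
```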
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1983.823248] env[62730]: DEBUG nova.network.neutron [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Updating instance_info_cache with network_info: [{"id": "7634a446-da34-40eb-aee8-15902108c2c1", "address": "fa:16:3e:2f:20:bd", "network": {"id": "13308c2e-b624-48c1-9ecc-76e4196392fc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2049844916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f54137e1151d46fe9ba541e5e2bce843", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7634a446-da", "ovs_interfaceid": "7634a446-da34-40eb-aee8-15902108c2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.836537] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Releasing lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.836831] env[62730]: DEBUG nova.compute.manager [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Instance network_info: |[{"id": "7634a446-da34-40eb-aee8-15902108c2c1", "address": "fa:16:3e:2f:20:bd", "network": {"id": "13308c2e-b624-48c1-9ecc-76e4196392fc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2049844916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f54137e1151d46fe9ba541e5e2bce843", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7634a446-da", "ovs_interfaceid": "7634a446-da34-40eb-aee8-15902108c2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1983.837240] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:20:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e55c248-c504-4c7a-bbe9-f42cf417aee7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7634a446-da34-40eb-aee8-15902108c2c1', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1983.844555] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Creating folder: Project (f54137e1151d46fe9ba541e5e2bce843). Parent ref: group-v942928. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1983.845075] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b72a03f6-62a7-4618-bd07-6320f5fa729c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.857425] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Created folder: Project (f54137e1151d46fe9ba541e5e2bce843) in parent group-v942928. [ 1983.857659] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Creating folder: Instances. Parent ref: group-v943040. {{(pid=62730) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1983.857899] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9a31112-accc-417e-9ca2-522433762049 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.866289] env[62730]: INFO nova.virt.vmwareapi.vm_util [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Created folder: Instances in parent group-v943040. [ 1983.866525] env[62730]: DEBUG oslo.service.loopingcall [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
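The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry and the "progress is N%" lines that follow come from a fixed-interval polling loop wrapped around the vCenter task. A minimal sketch of that pattern, assuming oslo.service is installed; the progress counter below stands in for reading task_info from vCenter:

```python
# Minimal sketch, assuming oslo.service is installed, of the polling loop
# behind "Waiting for the task ... to complete": the poll function raises
# LoopingCallDone to hand a return value back to the blocked waiter.
from oslo_service import loopingcall

def make_poller():
    progress = {"pct": 0}
    def poll_task():
        progress["pct"] += 50        # stand-in for task_info.progress
        print(f"progress is {progress['pct']}%")
        if progress["pct"] >= 100:   # stand-in for task_info.state == success
            raise loopingcall.LoopingCallDone("task result")
    return poll_task

timer = loopingcall.FixedIntervalLoopingCall(make_poller())
result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
print(result)                               # -> "task result"
```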
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.866736] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1983.866907] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e906c6f-34f9-44cf-9727-0ea8005a6c82 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.885391] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1983.885391] env[62730]: value = "task-4837278" [ 1983.885391] env[62730]: _type = "Task" [ 1983.885391] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.893023] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837278, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.395298] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837278, 'name': CreateVM_Task, 'duration_secs': 0.280308} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.395621] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1984.396189] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.396363] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.396678] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1984.396927] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e00d2a-75fb-452b-a79c-32277c1e151f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.401564] env[62730]: DEBUG oslo_vmware.api [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Waiting for the task: (returnval){ [ 1984.401564] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5273bdb1-d363-d62e-af17-d481c18fe4b5" [ 1984.401564] env[62730]: _type = "Task" [ 1984.401564] 
env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.409466] env[62730]: DEBUG oslo_vmware.api [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5273bdb1-d363-d62e-af17-d481c18fe4b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.912511] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.912734] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1984.912950] env[62730]: DEBUG oslo_concurrency.lockutils [None req-b2e7d03f-e2d5-4eea-ae01-b9420cd97366 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.144511] env[62730]: DEBUG nova.compute.manager [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Received event network-changed-7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1985.144783] env[62730]: DEBUG nova.compute.manager [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Refreshing instance network info cache due to event network-changed-7634a446-da34-40eb-aee8-15902108c2c1. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1985.145028] env[62730]: DEBUG oslo_concurrency.lockutils [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] Acquiring lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.145184] env[62730]: DEBUG oslo_concurrency.lockutils [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] Acquired lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.145353] env[62730]: DEBUG nova.network.neutron [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Refreshing network info cache for port 7634a446-da34-40eb-aee8-15902108c2c1 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1985.458694] env[62730]: DEBUG nova.network.neutron [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Updated VIF entry in instance network info cache for port 7634a446-da34-40eb-aee8-15902108c2c1. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1985.459130] env[62730]: DEBUG nova.network.neutron [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Updating instance_info_cache with network_info: [{"id": "7634a446-da34-40eb-aee8-15902108c2c1", "address": "fa:16:3e:2f:20:bd", "network": {"id": "13308c2e-b624-48c1-9ecc-76e4196392fc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-2049844916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f54137e1151d46fe9ba541e5e2bce843", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e55c248-c504-4c7a-bbe9-f42cf417aee7", "external-id": "nsx-vlan-transportzone-471", "segmentation_id": 471, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7634a446-da", "ovs_interfaceid": "7634a446-da34-40eb-aee8-15902108c2c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.469312] env[62730]: DEBUG oslo_concurrency.lockutils [req-1c9475fc-d5dc-42da-88ec-b4c82abce1e6 req-feae38c0-d65a-4b69-a9bd-cdc23c8df427 service nova] Releasing lock "refresh_cache-8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.739987] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.951249] env[62730]: WARNING oslo_vmware.rw_handles [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1994.951249] env[62730]: ERROR oslo_vmware.rw_handles [ 1994.952038] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1994.954311] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1994.954567] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Copying Virtual Disk [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/b5fa889d-1f83-4925-a0a2-82b3bbbf47a3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1994.955270] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d9b3d1d-eeda-458f-a7f1-59ee71c625f4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.964808] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1994.964808] env[62730]: value = "task-4837279" [ 1994.964808] 
env[62730]: _type = "Task" [ 1994.964808] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.974285] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.475432] env[62730]: DEBUG oslo_vmware.exceptions [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1995.475705] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.476335] env[62730]: ERROR nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1995.476335] env[62730]: Faults: ['InvalidArgument'] [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Traceback (most recent call last): [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] yield resources [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self.driver.spawn(context, instance, image_meta, [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self._fetch_image_if_missing(context, vi) [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1995.476335] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] image_cache(vi, tmp_image_ds_loc) [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: 
a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] vm_util.copy_virtual_disk( [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] session._wait_for_task(vmdk_copy_task) [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return self.wait_for_task(task_ref) [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return evt.wait() [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] result = hub.switch() [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return self.greenlet.switch() [ 1995.476847] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self.f(*self.args, **self.kw) [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] raise exceptions.translate_fault(task_info.error) [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Faults: ['InvalidArgument'] [ 1995.477333] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] [ 1995.477333] env[62730]: INFO nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Terminating instance [ 1995.478273] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.478479] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1995.478717] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be5ce125-64ca-43b7-b63f-4ab049f567d8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.482195] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1995.482387] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1995.483121] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b67c2dc-1699-4bed-acb7-1a5a445af8e7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.489959] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1995.490199] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d6b3164-672f-4e4d-a028-3fc7770ef721 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.492376] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1995.492549] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Folder [datastore2] devstack-image-cache_base created. 
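The Instances folder creation earlier and the devstack-image-cache_base mkdir here follow the same idempotent create-if-missing idiom: attempt the creation and tolerate the already-exists outcome, since concurrent builds race for the same cache directory. A hedged local-filesystem sketch of the idiom (Nova performs this through the vCenter FileManager API, not os.makedirs):

```python
# Hedged sketch of the "create directory, tolerate pre-existing" idiom the
# mkdir / _create_folder_if_missing lines above trace; the local filesystem
# stands in for the datastore here.
import os

def ensure_dir(path):
    try:
        os.makedirs(path)
        print(f"Created directory with path {path}")
    except FileExistsError:
        # A racing worker created it first; that is fine.
        print(f"Directory {path} already exists")

ensure_dir("/tmp/devstack-image-cache_base")
ensure_dir("/tmp/devstack-image-cache_base")  # second call is a no-op
```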
{{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1995.493527] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec64de81-b431-4ba6-9700-17c507cfa05d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.498758] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for the task: (returnval){ [ 1995.498758] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52de712c-3c39-660b-9288-8169c7bfd5b0" [ 1995.498758] env[62730]: _type = "Task" [ 1995.498758] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.512396] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52de712c-3c39-660b-9288-8169c7bfd5b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.563686] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1995.563914] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1995.564112] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleting the datastore file [datastore2] a5a39785-b18a-4d18-a0af-8b4065c354f2 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1995.564402] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72eac460-eb30-4708-9eff-654af96d2afc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.570611] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for the task: (returnval){ [ 1995.570611] env[62730]: value = "task-4837281" [ 1995.570611] env[62730]: _type = "Task" [ 1995.570611] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.579298] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837281, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.745192] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.745396] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1996.010105] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1996.010490] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Creating directory with path [datastore2] vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1996.010615] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a2c3049-302a-44fa-b376-5d07a4f1094f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.022443] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Created directory with path [datastore2] vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1996.022639] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Fetch image to [datastore2] vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1996.022814] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1996.023632] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd59833-e490-4e2a-b4d2-9b4d9a3446dd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.032550] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d63d064-8508-4c20-bf0f-0750ee25128f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.043247] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c32a13-004f-49ed-bda3-435c13513910 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.077817] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf99e46-ccc8-4f99-8a18-6e4fa62eab2b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.087240] env[62730]: DEBUG oslo_vmware.api [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Task: {'id': task-4837281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06672} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.089034] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1996.089302] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1996.089505] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1996.089712] env[62730]: INFO nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Took 0.61 seconds to destroy the instance on the hypervisor. 
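The destroy path above runs in a fixed order: unregister the VM, delete its datastore contents via DeleteDatastoreFile_Task, then report the elapsed time at INFO level. An illustrative sketch of that ordering, not the vmwareapi driver itself; the local filesystem again stands in for the datastore:

```python
# Illustrative sketch of the teardown order traced above: unregister, delete
# the instance's datastore directory, then log the elapsed time.
import os
import shutil
import tempfile
import time

def destroy_instance(unregister, datastore_dir):
    start = time.monotonic()
    unregister()                  # VirtualMachine.UnregisterVM in the log
    shutil.rmtree(datastore_dir)  # FileManager.DeleteDatastoreFile_Task
    took = time.monotonic() - start
    print(f"Took {took:.2f} seconds to destroy the instance on the hypervisor.")

d = tempfile.mkdtemp()
open(os.path.join(d, "disk.vmdk"), "w").close()
destroy_instance(lambda: print("Unregistered the VM"), d)
```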
[ 1996.091772] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc69a89e-4760-42f3-986a-a7a74dd14542 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.094476] env[62730]: DEBUG nova.compute.claims [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1996.094647] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.094857] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.112372] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1996.276835] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1996.334021] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ee71b0-ba4c-42c1-8815-56aa0f763418 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.338630] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1996.338800] env[62730]: DEBUG oslo_vmware.rw_handles [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
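The rw_handles lines trace the image download as a chunked copy from the Glance image iterator into an HTTP write handle against the ESX datastore URL, with the handle closed once the iterator is exhausted. A minimal sketch with a stand-in write handle; CountingWriter is illustrative, not oslo.vmware's API:

```python
# Minimal sketch of the chunked transfer behind the rw_handles lines above.
import io

class CountingWriter:
    """Stand-in for an oslo.vmware write handle; only counts bytes."""
    def __init__(self):
        self.nbytes = 0
    def write(self, chunk):
        self.nbytes += len(chunk)
    def close(self):
        print(f"Closing write handle after {self.nbytes} bytes")

def image_transfer(read_handle, write_handle, chunk_size=64 * 1024):
    # Copy until the image iterator is exhausted, then close the handle,
    # mirroring "Completed reading data from the image iterator." above.
    while True:
        chunk = read_handle.read(chunk_size)
        if not chunk:
            break
        write_handle.write(chunk)
    write_handle.close()

image_transfer(io.BytesIO(b"x" * 21318656), CountingWriter())
# -> closes after 21318656 bytes, the image size reported in the log
```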
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1996.343535] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114be3f9-9d46-4249-ab97-6bb588557103 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.376583] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdfae37-1735-490d-a3c5-86d472b75429 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.385153] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118da299-9408-4ee0-9500-0f9c9a922f21 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.399327] env[62730]: DEBUG nova.compute.provider_tree [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.408095] env[62730]: DEBUG nova.scheduler.client.report [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1996.422893] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.423464] env[62730]: ERROR nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1996.423464] env[62730]: Faults: ['InvalidArgument'] [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Traceback (most recent call last): [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self.driver.spawn(context, instance, image_meta, [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1996.423464] env[62730]: ERROR 
nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self._fetch_image_if_missing(context, vi) [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] image_cache(vi, tmp_image_ds_loc) [ 1996.423464] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] vm_util.copy_virtual_disk( [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] session._wait_for_task(vmdk_copy_task) [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return self.wait_for_task(task_ref) [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return evt.wait() [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] result = hub.switch() [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] return self.greenlet.switch() [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1996.424054] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] self.f(*self.args, **self.kw) [ 1996.424764] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1996.424764] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] raise exceptions.translate_fault(task_info.error) [ 1996.424764] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1996.424764] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Faults: ['InvalidArgument'] [ 1996.424764] env[62730]: ERROR nova.compute.manager [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] [ 1996.424764] env[62730]: DEBUG nova.compute.utils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1996.425687] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Build of instance a5a39785-b18a-4d18-a0af-8b4065c354f2 was re-scheduled: A specified parameter was not correct: fileType [ 1996.425687] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1996.426088] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1996.426283] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1996.426492] env[62730]: DEBUG nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1996.426679] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1996.730194] env[62730]: DEBUG nova.network.neutron [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.741507] env[62730]: INFO nova.compute.manager [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Took 0.31 seconds to deallocate network for instance. 
[ 1996.848862] env[62730]: INFO nova.scheduler.client.report [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Deleted allocations for instance a5a39785-b18a-4d18-a0af-8b4065c354f2 [ 1996.873808] env[62730]: DEBUG oslo_concurrency.lockutils [None req-d67784f8-5f1d-4e78-b9cd-721fe8088479 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 615.098s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.874252] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 418.918s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.874591] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Acquiring lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.874903] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.875160] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.877658] env[62730]: INFO nova.compute.manager [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Terminating instance [ 1996.879553] env[62730]: DEBUG nova.compute.manager [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1996.879752] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1996.880294] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09191475-dcb9-4d23-a180-1b3fa0728dda {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.890348] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf268d9-f534-4239-9b6c-acbff47e25e9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.922333] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a5a39785-b18a-4d18-a0af-8b4065c354f2 could not be found. [ 1996.922558] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1996.922745] env[62730]: INFO nova.compute.manager [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1996.923012] env[62730]: DEBUG oslo.service.loopingcall [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1996.923319] env[62730]: DEBUG nova.compute.manager [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1996.923388] env[62730]: DEBUG nova.network.neutron [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1996.959790] env[62730]: DEBUG nova.network.neutron [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.968018] env[62730]: INFO nova.compute.manager [-] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] Took 0.04 seconds to deallocate network for instance. 
[ 1997.079543] env[62730]: DEBUG oslo_concurrency.lockutils [None req-518939aa-7ea5-46ff-ad9d-8873f18ed7f9 tempest-ImagesTestJSON-485213835 tempest-ImagesTestJSON-485213835-project-member] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.080323] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 281.973s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.080514] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: a5a39785-b18a-4d18-a0af-8b4065c354f2] During sync_power_state the instance has a pending task (deleting). Skip. [ 1997.080705] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "a5a39785-b18a-4d18-a0af-8b4065c354f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.743843] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.744248] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.516009] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.516341] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.529430] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Starting instance... 
{{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2005.611067] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.611342] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.613172] env[62730]: INFO nova.compute.claims [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2005.674995] env[62730]: DEBUG nova.scheduler.client.report [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2005.688928] env[62730]: DEBUG nova.scheduler.client.report [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2005.689181] env[62730]: DEBUG nova.compute.provider_tree [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2005.699992] env[62730]: DEBUG nova.scheduler.client.report [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 
2005.718585] env[62730]: DEBUG nova.scheduler.client.report [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Refreshing trait associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2005.737514] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.737729] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.737925] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.738107] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2005.749281] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.859114] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44525a24-af84-4530-b22c-69a77f06fd3d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.867468] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e4f987-b322-47e9-8914-9a4a6d7eff65 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.897912] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e213715-cf5e-489e-bf56-8e1d7959597f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.905745] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b978aae5-4e84-4f2d-9d60-e169d51b7910 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.919109] env[62730]: DEBUG nova.compute.provider_tree [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2005.928146] env[62730]: DEBUG 
nova.scheduler.client.report [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2005.941778] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.942234] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Start building networks asynchronously for instance. {{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2005.944445] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.195s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.944628] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.944786] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2005.945982] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7912cee9-59e2-4fdf-8cff-7445eca63feb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.954420] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f25c3c8-1c7a-40fc-b056-7f9f3b5ab785 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.968810] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e842d487-d470-4aad-98ff-df61407c2952 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.976219] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c599b14-4ce5-4dec-9936-ee9bf58f605b {{(pid=62730) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.981580] env[62730]: DEBUG nova.compute.utils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Using /dev/sd instead of None {{(pid=62730) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2005.983067] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Allocating IP information in the background. {{(pid=62730) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2005.983246] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] allocate_for_instance() {{(pid=62730) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2006.020310] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180533MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2006.020524] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.021167] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.022707] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Start building block device mappings for instance. 
{{(pid=62730) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2006.073333] env[62730]: DEBUG nova.policy [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b22447bf6ab4e93a4450b13d7d9a3a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '861b7ee6cc2444678f4056271d23e872', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62730) authorize /opt/stack/nova/nova/policy.py:203}} [ 2006.097328] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.097500] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.097630] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.097745] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.097935] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098083] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098204] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098320] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098435] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098550] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2006.098745] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2006.098949] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '15', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_70e4ccdd17d64e0da492ff6c4b0f79d1': '1', 'io_workload': '10', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '2', 'num_task_spawning': '2', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1', 'num_task_networking': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2006.103018] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Start spawning the instance on the hypervisor. 
{{(pid=62730) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2006.127529] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-11-26T09:07:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-11-26T09:07:37Z,direct_url=,disk_format='vmdk',id=a46adab9-3ef5-4b2e-8d44-bab77576ed71,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='05ec08bc94b84623a044562d4cbaee75',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-11-26T09:07:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2006.127842] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Flavor limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2006.127974] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Image limits 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2006.128189] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Flavor pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2006.128340] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Image pref 0:0:0 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2006.128493] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62730) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2006.128741] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2006.128991] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2006.129236] env[62730]: DEBUG nova.virt.hardware [None 
req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Got 1 possible topologies {{(pid=62730) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2006.129419] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2006.129640] env[62730]: DEBUG nova.virt.hardware [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62730) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2006.130552] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7b7c93-f83b-4c3e-b6ed-29032d2dfb05 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.141945] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df9f40f-dd93-4efe-8c56-322bbbbf53f1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.243538] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7d9e68-279a-4114-87ee-7bfc15d968b1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.252036] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa756ea5-1bcb-4252-8dce-6f2be1d55920 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.283373] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1c856e-8ec5-46cb-b73b-3506c6b77794 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.292055] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432a8170-3dc5-45ce-9be9-beb7dbd41521 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.306021] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.316225] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2006.332596] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2006.333377] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.312s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.482962] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Successfully created port: 8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2007.092831] env[62730]: DEBUG nova.compute.manager [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Received event network-vif-plugged-8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2007.093243] env[62730]: DEBUG oslo_concurrency.lockutils [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] Acquiring lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.093493] env[62730]: DEBUG oslo_concurrency.lockutils [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] Lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.093668] env[62730]: DEBUG oslo_concurrency.lockutils [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] Lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.093857] env[62730]: DEBUG nova.compute.manager [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] No waiting events found dispatching network-vif-plugged-8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2007.094039] env[62730]: WARNING nova.compute.manager [req-3adf689c-253b-4c21-b1b8-ca41d9617909 req-afa77a36-1cb1-4070-ba20-e5d98ad61395 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Received unexpected event network-vif-plugged-8b217ab1-6146-4e7d-b434-cb4a8cee28b5 for instance with vm_state building and task_state spawning. 
[ 2007.192315] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Successfully updated port: 8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2007.216535] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.216725] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.216940] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2007.277112] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2007.332300] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.332478] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2007.332602] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2007.357963] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358165] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358291] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358421] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358548] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358675] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358799] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.358992] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.359050] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.359164] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2007.359284] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2007.502308] env[62730]: DEBUG nova.network.neutron [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Updating instance_info_cache with network_info: [{"id": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "address": "fa:16:3e:99:49:81", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b217ab1-61", "ovs_interfaceid": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.518863] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Releasing lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.519223] env[62730]: DEBUG nova.compute.manager [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Instance network_info: |[{"id": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "address": "fa:16:3e:99:49:81", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b217ab1-61", "ovs_interfaceid": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62730) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2007.520129] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:49:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ab2e9f5-54fd-4cab-9405-ed65e2aaba64', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b217ab1-6146-4e7d-b434-cb4a8cee28b5', 'vif_model': 'vmxnet3'}] {{(pid=62730) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2007.527489] env[62730]: DEBUG oslo.service.loopingcall [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2007.527979] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Creating VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2007.528222] env[62730]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a8339e7-9fa4-49ce-9f7c-01649dc215ea {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.549434] env[62730]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2007.549434] env[62730]: value = "task-4837282" [ 2007.549434] env[62730]: _type = "Task" [ 2007.549434] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.558214] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837282, 'name': CreateVM_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.061851] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837282, 'name': CreateVM_Task} progress is 99%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.561365] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837282, 'name': CreateVM_Task} progress is 99%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.062221] env[62730]: DEBUG oslo_vmware.api [-] Task: {'id': task-4837282, 'name': CreateVM_Task, 'duration_secs': 1.305635} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.062432] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Created VM on the ESX host {{(pid=62730) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2009.063094] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.063273] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.063619] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2009.063884] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f9c1a90-398c-438e-8f09-aa52c7edc731 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.068673] env[62730]: DEBUG oslo_vmware.api [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for the task: (returnval){ [ 2009.068673] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5208f76d-13e7-0d65-20b1-b0c2b4157e35" [ 2009.068673] env[62730]: _type = "Task" [ 2009.068673] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.076685] env[62730]: DEBUG oslo_vmware.api [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5208f76d-13e7-0d65-20b1-b0c2b4157e35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.117573] env[62730]: DEBUG nova.compute.manager [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Received event network-changed-8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2009.117794] env[62730]: DEBUG nova.compute.manager [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Refreshing instance network info cache due to event network-changed-8b217ab1-6146-4e7d-b434-cb4a8cee28b5. 
{{(pid=62730) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2009.118032] env[62730]: DEBUG oslo_concurrency.lockutils [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] Acquiring lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.118185] env[62730]: DEBUG oslo_concurrency.lockutils [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] Acquired lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.118351] env[62730]: DEBUG nova.network.neutron [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Refreshing network info cache for port 8b217ab1-6146-4e7d-b434-cb4a8cee28b5 {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2009.376354] env[62730]: DEBUG nova.network.neutron [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Updated VIF entry in instance network info cache for port 8b217ab1-6146-4e7d-b434-cb4a8cee28b5. {{(pid=62730) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2009.376731] env[62730]: DEBUG nova.network.neutron [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Updating instance_info_cache with network_info: [{"id": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "address": "fa:16:3e:99:49:81", "network": {"id": "620c1f68-972f-4380-86f9-2739c817e947", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-694792688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "861b7ee6cc2444678f4056271d23e872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ab2e9f5-54fd-4cab-9405-ed65e2aaba64", "external-id": "nsx-vlan-transportzone-222", "segmentation_id": 222, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b217ab1-61", "ovs_interfaceid": "8b217ab1-6146-4e7d-b434-cb4a8cee28b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.386485] env[62730]: DEBUG oslo_concurrency.lockutils [req-6fbd001c-9502-43e4-800f-abd6ad4a8165 req-cac0ed13-dcea-48a0-9a00-87676af76de2 service nova] Releasing lock "refresh_cache-1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.579324] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Releasing 
lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.579713] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Processing image a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2009.579788] env[62730]: DEBUG oslo_concurrency.lockutils [None req-850ce3d5-ac10-4a3a-b632-55c3d8fcd550 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.736843] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.737282] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.737282] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2022.737722] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.738181] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2022.748367] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 0 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2044.969779] env[62730]: WARNING oslo_vmware.rw_handles [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2044.969779] env[62730]: ERROR oslo_vmware.rw_handles [ 2044.970375] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2044.972027] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2044.972277] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/a1bb10b9-7fbd-486c-bb05-b843287fd68e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2044.972557] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd391698-d0a9-4aea-aa28-34ed2396cf64 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.981436] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for the task: (returnval){ [ 2044.981436] env[62730]: value = "task-4837283" [ 2044.981436] env[62730]: _type = "Task" [ 2044.981436] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.990112] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Task: {'id': task-4837283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.491623] env[62730]: DEBUG oslo_vmware.exceptions [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Fault InvalidArgument not matched. {{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2045.491922] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.492494] env[62730]: ERROR nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2045.492494] env[62730]: Faults: ['InvalidArgument'] [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Traceback (most recent call last): [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] yield resources [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self.driver.spawn(context, instance, image_meta, [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] 
File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self._fetch_image_if_missing(context, vi) [ 2045.492494] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] image_cache(vi, tmp_image_ds_loc) [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] vm_util.copy_virtual_disk( [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] session._wait_for_task(vmdk_copy_task) [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return self.wait_for_task(task_ref) [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return evt.wait() [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] result = hub.switch() [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2045.492766] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return self.greenlet.switch() [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self.f(*self.args, **self.kw) [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] raise 
exceptions.translate_fault(task_info.error) [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Faults: ['InvalidArgument'] [ 2045.493119] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] [ 2045.493119] env[62730]: INFO nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Terminating instance [ 2045.494424] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.494635] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2045.494876] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ac4d338-53ec-44e1-96e7-5f3ea7bc4852 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.497341] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2045.497550] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2045.498308] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a66e42f-d701-4998-8668-021f443b433e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.505330] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2045.506410] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-442e7cdc-bafd-4126-a360-aa3513eb20a9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.507899] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2045.508083] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2045.508735] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7bd6a3-51c8-4fd7-8987-0b7634459cab {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.515128] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for the task: (returnval){ [ 2045.515128] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e3b28d-5286-df5f-a184-5c09b9910d2a" [ 2045.515128] env[62730]: _type = "Task" [ 2045.515128] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.522344] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52e3b28d-5286-df5f-a184-5c09b9910d2a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.579539] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2045.579777] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2045.579964] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Deleting the datastore file [datastore2] 8d18fd69-cdaf-470c-b942-cd00c66f45ea {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2045.580344] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e8512e8-f297-4775-8671-41d3c2a96369 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.587660] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for the task: (returnval){ [ 2045.587660] env[62730]: value = "task-4837285" [ 2045.587660] env[62730]: _type = "Task" [ 2045.587660] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.595775] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Task: {'id': task-4837285, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.028976] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2046.029364] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Creating directory with path [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2046.029710] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e801a24-f323-46cb-baed-3902927b314c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.042155] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Created directory with path [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2046.042434] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Fetch image to [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2046.042693] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2046.043741] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d794e-e4cb-4615-89d1-0f88f59f5526 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.051569] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd8332c-2a35-43d1-a02e-dfc9a552694b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.060807] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa484f42-da66-4ebe-b2ed-4f1997ab4db7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.093791] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35a188a-806a-4919-a851-40c976f3ae3e {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.102623] env[62730]: DEBUG oslo_vmware.api [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Task: {'id': task-4837285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077564} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.103174] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2046.103365] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2046.103542] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2046.103721] env[62730]: INFO nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Took 0.61 seconds to destroy the instance on the hypervisor. 
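[Editor's note] The task lines above ("Waiting for the task: (returnval){ value = "task-4837285" ... }", "progress is 0%.", "completed successfully") come from oslo.vmware's task polling, which blocks the caller until vCenter marks the task finished or failed. The following is a minimal illustrative sketch of that polling shape, not the oslo.vmware source: fetch_task_info and TaskFault are hypothetical stand-ins for the PropertyCollector read and for oslo_vmware.exceptions.VimFaultException respectively, and the real loop runs inside a loopingcall rather than time.sleep.

```python
import time


class TaskFault(Exception):
    """Hypothetical stand-in for the translated vCenter task fault."""


def wait_for_task(fetch_task_info, interval=0.5):
    """Poll a vCenter-style task until it succeeds or errors.

    Mirrors the log shape above: repeated 'progress is N%.' reads,
    then either 'completed successfully.' or a raised fault (which is
    where the VimFaultException tracebacks in this log originate).
    """
    while True:
        info = fetch_task_info()  # e.g. one PropertyCollector round-trip
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFault(info["error"])
        time.sleep(interval)
```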
[ 2046.105282] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-44b3ef3c-30e0-41bf-84f5-1f206c6056f6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.107228] env[62730]: DEBUG nova.compute.claims [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2046.107399] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.107643] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.136707] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2046.202027] env[62730]: DEBUG oslo_vmware.rw_handles [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2046.266764] env[62730]: DEBUG oslo_vmware.rw_handles [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2046.266995] env[62730]: DEBUG oslo_vmware.rw_handles [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2046.356135] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c5209c-04e4-4b34-b3d5-fd9fd3aa5473 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.364135] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3518ba-9f45-41a2-bb78-44e4444b3cd5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.394679] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9648c3-8f13-4788-a25a-46259b403490 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.402745] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8b997b-58fe-4eba-aff2-c388e8c2d99f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.416403] env[62730]: DEBUG nova.compute.provider_tree [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.424935] env[62730]: DEBUG nova.scheduler.client.report [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2046.440620] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.333s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.441162] env[62730]: ERROR nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2046.441162] env[62730]: Faults: ['InvalidArgument'] [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Traceback (most recent call last): [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2046.441162] 
env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self.driver.spawn(context, instance, image_meta, [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self._fetch_image_if_missing(context, vi) [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] image_cache(vi, tmp_image_ds_loc) [ 2046.441162] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] vm_util.copy_virtual_disk( [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] session._wait_for_task(vmdk_copy_task) [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return self.wait_for_task(task_ref) [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return evt.wait() [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] result = hub.switch() [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] return self.greenlet.switch() [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2046.441624] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] self.f(*self.args, **self.kw) [ 2046.441962] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2046.441962] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] raise exceptions.translate_fault(task_info.error) [ 2046.441962] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2046.441962] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Faults: ['InvalidArgument'] [ 2046.441962] env[62730]: ERROR nova.compute.manager [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] [ 2046.441962] env[62730]: DEBUG nova.compute.utils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2046.443299] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Build of instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea was re-scheduled: A specified parameter was not correct: fileType [ 2046.443299] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2046.443673] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2046.443888] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2046.444088] env[62730]: DEBUG nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2046.444258] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2046.817314] env[62730]: DEBUG nova.network.neutron [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.857372] env[62730]: INFO nova.compute.manager [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Took 0.41 seconds to deallocate network for instance. [ 2046.986230] env[62730]: INFO nova.scheduler.client.report [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Deleted allocations for instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea [ 2047.009643] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c640e5a-191b-4c68-b323-5be1b2f4ff4c tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 657.841s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.009914] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 462.256s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.010157] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Acquiring lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.010372] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.010547] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.012618] env[62730]: INFO nova.compute.manager [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Terminating instance [ 2047.014339] env[62730]: DEBUG nova.compute.manager [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2047.014536] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2047.015013] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21702f52-8eb5-4bc1-821f-e280c0b13009 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.024374] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7eda75b-523c-4ab0-bf4e-dbada7f0c226 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.056473] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d18fd69-cdaf-470c-b942-cd00c66f45ea could not be found. [ 2047.056753] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2047.056868] env[62730]: INFO nova.compute.manager [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2047.057127] env[62730]: DEBUG oslo.service.loopingcall [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.057482] env[62730]: DEBUG nova.compute.manager [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2047.057665] env[62730]: DEBUG nova.network.neutron [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2047.081930] env[62730]: DEBUG nova.network.neutron [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.090057] env[62730]: INFO nova.compute.manager [-] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] Took 0.03 seconds to deallocate network for instance. [ 2047.194702] env[62730]: DEBUG oslo_concurrency.lockutils [None req-523e3af6-4b54-49cf-8021-ef87d35cd169 tempest-ServerActionsTestOtherB-2080524800 tempest-ServerActionsTestOtherB-2080524800-project-member] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.195661] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 332.088s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.195862] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8d18fd69-cdaf-470c-b942-cd00c66f45ea] During sync_power_state the instance has a pending task (deleting). Skip. 
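[Editor's note] The "During sync_power_state the instance has a pending task (deleting). Skip." line above reflects a guard in the periodic power-state sync: instances with a task_state in flight are left alone so the sync cannot race the operation that owns them. A minimal sketch of that guard, under the assumption of simplified dict-shaped instances (query_driver_power_state is a hypothetical stand-in for the virt-driver call):

```python
def sync_power_state(instance, query_driver_power_state):
    """Reconcile DB power state with the hypervisor, unless a task is pending."""
    if instance.get("task_state") is not None:
        # e.g. task_state == 'deleting' in the log entry above: the delete
        # path owns this instance, so the periodic sync must not touch it.
        print("During sync_power_state the instance has a pending task "
              f"({instance['task_state']}). Skip.")
        return
    driver_state = query_driver_power_state(instance["uuid"])
    if instance["power_state"] != driver_state:
        # Trust the hypervisor's view and update the database record.
        instance["power_state"] = driver_state
```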
[ 2047.196448] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "8d18fd69-cdaf-470c-b942-cd00c66f45ea" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.744481] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.737632] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.737619] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.737046] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.737305] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.737473] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.749857] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.750219] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.750338] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.750442] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
2066.751995] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47d3538-e5de-4c94-b2a5-e9cc8ddefce0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.760685] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a8052-bdcb-41d3-91ec-8d9881aaaaba {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.775387] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3e8fe6-9cda-4562-adb4-d722aa4c1f47 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.782228] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b9b4f5-862a-47a1-a8a6-c12a342d40aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.812783] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180546MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2066.812945] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.813154] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.894776] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 842e4145-ba83-48d5-8514-78532381eb2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.894944] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895090] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895218] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895338] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895458] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895576] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895694] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.895809] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2066.896008] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2066.896163] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '16', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '6', 'num_os_type_None': '9', 'num_proj_c6181e6d67e74692b11bddb3c1ed2779': '1', 'io_workload': '9', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '2', 'num_task_spawning': '3', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2067.012761] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7b37b5-2507-458e-8957-61f2b990f2b5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.022115] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeee51c9-0cd5-4342-85e7-d62eeb6b51a7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.051771] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e4f5b3-b8fa-436a-aee4-79ee39a2ca3a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.059611] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5f5aa8-d1d9-45c6-8f6c-f0f7c2a8d05f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.074314] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.083438] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2067.099414] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2067.099603] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.286s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.096859] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.117548] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.117762] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2069.117878] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2069.136252] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.136413] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.136549] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.136678] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.136805] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.136931] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.137068] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.137194] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.137316] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2069.137437] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2070.737736] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.738133] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2071.737623] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2092.128136] env[62730]: WARNING oslo_vmware.rw_handles [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end 
closed connection without response [ 2092.128136] env[62730]: ERROR oslo_vmware.rw_handles [ 2092.128136] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2092.130073] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2092.131081] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Copying Virtual Disk [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/04b9dd3d-462f-42cb-9108-d3b21625d65e/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2092.131081] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3bba3a7-c68a-4c48-bade-f44fb052c0ff {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.139751] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for the task: (returnval){ [ 2092.139751] env[62730]: value = "task-4837286" [ 2092.139751] env[62730]: _type = "Task" [ 2092.139751] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.148263] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Task: {'id': task-4837286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.649510] env[62730]: DEBUG oslo_vmware.exceptions [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2092.649857] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.650573] env[62730]: ERROR nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2092.650573] env[62730]: Faults: ['InvalidArgument'] [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Traceback (most recent call last): [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] yield resources [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self.driver.spawn(context, instance, image_meta, [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self._fetch_image_if_missing(context, vi) [ 2092.650573] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] image_cache(vi, tmp_image_ds_loc) [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] vm_util.copy_virtual_disk( [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] session._wait_for_task(vmdk_copy_task) [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return self.wait_for_task(task_ref) [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return evt.wait() [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] result = hub.switch() [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2092.650926] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return self.greenlet.switch() [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self.f(*self.args, **self.kw) [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] raise exceptions.translate_fault(task_info.error) [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Faults: ['InvalidArgument'] [ 2092.651252] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] [ 2092.651252] env[62730]: INFO nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Terminating instance [ 2092.652727] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.652940] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.653199] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d058c6a-9d31-470b-886a-a674bae85299 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.655518] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2092.655721] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2092.656592] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e269f54-ffee-4a76-98bd-4b22fbb9ead6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.664314] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2092.664571] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ebc6901-d230-4b11-a2ca-bca70a16c4ed {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.667093] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.667281] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2092.668327] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e41b1fa-37b0-4a09-b523-7f93a95bafe4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.674295] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for the task: (returnval){ [ 2092.674295] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5279d722-473e-ea90-b3e4-a38ebf6f94e5" [ 2092.674295] env[62730]: _type = "Task" [ 2092.674295] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.686410] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5279d722-473e-ea90-b3e4-a38ebf6f94e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.737496] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2092.737743] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2092.737905] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Deleting the datastore file [datastore2] 842e4145-ba83-48d5-8514-78532381eb2d {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.738215] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba52bd19-a675-483b-adfb-1cd301610118 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.744683] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for the task: (returnval){ [ 2092.744683] env[62730]: value = "task-4837288" [ 2092.744683] env[62730]: _type = "Task" [ 2092.744683] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.752896] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Task: {'id': task-4837288, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.184854] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2093.185215] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Creating directory with path [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2093.185598] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98ce7cf1-7c96-4f26-9437-52be5d73cca2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.198275] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Created directory with path [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.198554] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Fetch image to [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2093.198710] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2093.199544] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba45a96-cae0-4b89-8672-04116477b7ac {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.207707] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fda2f68-cb43-414f-8b1e-c63590754b90 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.218069] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078e19d2-9d98-4273-9040-68c6e9f54fe0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.253626] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a80c7857-4504-4825-98e8-ae584bfbc095 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.262973] env[62730]: DEBUG oslo_vmware.api [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Task: {'id': task-4837288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085764} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.263204] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ffb8ddfa-50a5-429a-b8ae-3cbb07649265 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.264944] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2093.265149] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2093.265331] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2093.265520] env[62730]: INFO nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Took 0.61 seconds to destroy the instance on the hypervisor. 
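The "Task: {...} progress is N%." and "completed successfully" records above come from a poll loop: the API layer repeatedly reads the server-side task state until it reaches success or error, and on error translates the fault into an exception, which is exactly how the "A specified parameter was not correct: fileType" InvalidArgument fault surfaced in nova.compute.manager earlier in this section. A simplified sketch of that polling pattern follows; TaskInfo, fetch_task_info, and the interval are hypothetical stand-ins, not oslo.vmware's real internals.

    # Simplified sketch of the task-polling loop behind the
    # "Task: {...} progress is N%." lines. The TaskInfo shape and
    # fetch_task_info() are assumptions, not oslo.vmware's API.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # "running" | "success" | "error"
        progress: int           # 0-100
        error: str | None = None

    class VimFaultException(Exception):
        pass

    def wait_for_task(task_id: str, fetch_task_info, interval: float = 0.5):
        """Poll a server-side task until it finishes.

        Returns the final TaskInfo on success; raises the translated fault
        on error, which is how the InvalidArgument/fileType failure reached
        the compute manager's traceback above.
        """
        while True:
            info = fetch_task_info(task_id)
            print(f"Task: {{'id': {task_id!r}}} progress is {info.progress}%.")
            if info.state == "success":
                return info
            if info.state == "error":
                raise VimFaultException(info.error)
            time.sleep(interval)  # loopingcall-style fixed polling interval

    # A stub that completes on the second poll, like task-4837288 above,
    # which reported 0% once and then finished in under a tenth of a second:
    states = iter([TaskInfo("running", 0), TaskInfo("success", 100)])
    wait_for_task("task-4837288", lambda _id: next(states), interval=0.0)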
[ 2093.267807] env[62730]: DEBUG nova.compute.claims [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2093.268016] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.268241] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.291034] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2093.348862] env[62730]: DEBUG oslo_vmware.rw_handles [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2093.407289] env[62730]: DEBUG oslo_vmware.rw_handles [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2093.407487] env[62730]: DEBUG oslo_vmware.rw_handles [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2093.484362] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f15bc1a-857e-4d13-bedd-8ca93e08d1a8 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.492573] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177e60e1-30ef-429d-a9bd-60e4e02ba59c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.523902] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f88294-7b09-4fe7-af45-9632ddb2867f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.531592] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb12dbd-4631-4417-89c0-a19ea3aed2aa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.545353] env[62730]: DEBUG nova.compute.provider_tree [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2093.555559] env[62730]: DEBUG nova.scheduler.client.report [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2093.570934] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.571526] env[62730]: ERROR nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.571526] env[62730]: Faults: ['InvalidArgument'] [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Traceback (most recent call last): [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 
842e4145-ba83-48d5-8514-78532381eb2d] self.driver.spawn(context, instance, image_meta, [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self._fetch_image_if_missing(context, vi) [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] image_cache(vi, tmp_image_ds_loc) [ 2093.571526] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] vm_util.copy_virtual_disk( [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] session._wait_for_task(vmdk_copy_task) [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return self.wait_for_task(task_ref) [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return evt.wait() [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] result = hub.switch() [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] return self.greenlet.switch() [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2093.571858] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] self.f(*self.args, **self.kw) [ 2093.572247] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2093.572247] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] raise exceptions.translate_fault(task_info.error) [ 2093.572247] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.572247] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Faults: ['InvalidArgument'] [ 2093.572247] env[62730]: ERROR nova.compute.manager [instance: 842e4145-ba83-48d5-8514-78532381eb2d] [ 2093.572380] env[62730]: DEBUG nova.compute.utils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2093.573924] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Build of instance 842e4145-ba83-48d5-8514-78532381eb2d was re-scheduled: A specified parameter was not correct: fileType [ 2093.573924] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2093.574342] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2093.574524] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2093.574700] env[62730]: DEBUG nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2093.574873] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2094.130913] env[62730]: DEBUG nova.network.neutron [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.144751] env[62730]: INFO nova.compute.manager [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Took 0.57 seconds to deallocate network for instance. [ 2094.249082] env[62730]: INFO nova.scheduler.client.report [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Deleted allocations for instance 842e4145-ba83-48d5-8514-78532381eb2d [ 2094.273076] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5ff52d9f-683d-413d-b73a-65cf5dbeb15b tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 601.908s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.273377] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 406.785s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.273602] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Acquiring lock "842e4145-ba83-48d5-8514-78532381eb2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.273817] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2094.274021] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.276165] env[62730]: INFO nova.compute.manager [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Terminating instance [ 2094.278147] env[62730]: DEBUG nova.compute.manager [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2094.278374] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2094.278892] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3462d3a2-ed46-49a4-9d2d-b2e7c506d00e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.288750] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f454e48-f28b-4ab4-af0c-ec7c4b4f1e34 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.319257] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 842e4145-ba83-48d5-8514-78532381eb2d could not be found. [ 2094.319510] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2094.319710] env[62730]: INFO nova.compute.manager [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2094.319962] env[62730]: DEBUG oslo.service.loopingcall [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2094.320209] env[62730]: DEBUG nova.compute.manager [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2094.320305] env[62730]: DEBUG nova.network.neutron [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2094.352627] env[62730]: DEBUG nova.network.neutron [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.360811] env[62730]: INFO nova.compute.manager [-] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] Took 0.04 seconds to deallocate network for instance. [ 2094.453369] env[62730]: DEBUG oslo_concurrency.lockutils [None req-5fb30299-d9bd-4942-a61a-f913b3b8618a tempest-ServersTestJSON-1151127865 tempest-ServersTestJSON-1151127865-project-member] Lock "842e4145-ba83-48d5-8514-78532381eb2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.180s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.454222] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "842e4145-ba83-48d5-8514-78532381eb2d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 379.346s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.454413] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 842e4145-ba83-48d5-8514-78532381eb2d] During sync_power_state the instance has a pending task (deleting). Skip. 
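The "During sync_power_state ... pending task (deleting). Skip." records show the guard that keeps the periodic power-state audit from fighting an in-flight lifecycle operation: each instance is handled under its per-UUID lock, and anything with a task_state set is skipped rather than reconciled. A minimal sketch of that guard follows; the Instance shape and field names are illustrative assumptions, not Nova's actual model.

    # Minimal sketch of the pending-task guard seen in the
    # "During sync_power_state ... Skip." lines above. The Instance
    # dataclass and field names are assumptions, not Nova's code.
    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        task_state: str | None   # e.g. "deleting", "spawning", or None
        power_state: int

    def query_driver_power_state_and_sync(instance: Instance, driver_state: int):
        if instance.task_state is not None:
            # A lifecycle operation owns this instance right now; syncing
            # power state against the hypervisor would race with it.
            print(f"[instance: {instance.uuid}] During sync_power_state the "
                  f"instance has a pending task ({instance.task_state}). Skip.")
            return
        if instance.power_state != driver_state:
            instance.power_state = driver_state   # reconcile DB with driver

    # The instance above was mid-delete when the audit finally got the lock,
    # so the sync bailed out immediately (held 0.000s):
    query_driver_power_state_and_sync(
        Instance("842e4145-ba83-48d5-8514-78532381eb2d", "deleting", 1),
        driver_state=4,
    )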
[ 2094.454591] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "842e4145-ba83-48d5-8514-78532381eb2d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.732896] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2124.739116] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.808103] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.736764] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.737143] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.737180] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.751953] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.752205] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.752382] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.752544] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2127.753668] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52948293-1ee8-4cff-b1ef-31ca7ec55930 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.763025] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c922fc-3e70-4a58-9b24-b390ad8fdb3a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.778076] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50e90db-9873-4758-87aa-4412f4e71d80 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.785152] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fabb65-db76-4926-970c-52263301a097 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.816908] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180541MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2127.816908] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.817191] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.886998] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance adc5639c-773e-4deb-9387-004833e94507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.887265] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.887459] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.887589] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.887755] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.887912] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.888061] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.888185] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2127.888383] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2127.888535] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] stats={'failed_builds': '17', 'num_instances': '8', 'num_vm_building': '8', 'num_task_deleting': '6', 'num_os_type_None': '8', 'num_proj_350dbc45d12e4bd3a2bd888b484b3173': '1', 'io_workload': '8', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '2', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_task_spawning': '2', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2127.997018] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a80130-373c-416f-ae45-d424c1d84c50 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.006764] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae472e2c-7173-405b-8fc8-ebaec4f67437 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.036660] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad91b2f-61b3-4654-a50c-c0a06dd74d88 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.044576] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b60313-706a-4641-92e9-48e930c2ac67 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.057794] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.066391] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2128.080077] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2128.080292] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.263s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.081259] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.081608] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2129.081608] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2129.101020] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101020] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101020] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101020] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101230] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101408] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101545] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101678] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2129.101803] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2129.102344] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.737507] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.737896] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.737999] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2141.364552] env[62730]: WARNING oslo_vmware.rw_handles [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2141.364552] env[62730]: ERROR oslo_vmware.rw_handles [ 2141.366147] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: 
adc5639c-773e-4deb-9387-004833e94507] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2141.366980] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2141.367256] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Copying Virtual Disk [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/99ad3587-aad4-4c4f-bbdb-641988b4b5c3/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2141.367562] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47d2032b-eefc-41d1-ac58-ff4f1c1ff7ed {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.376419] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for the task: (returnval){ [ 2141.376419] env[62730]: value = "task-4837289" [ 2141.376419] env[62730]: _type = "Task" [ 2141.376419] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.385366] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Task: {'id': task-4837289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.887403] env[62730]: DEBUG oslo_vmware.exceptions [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2141.887697] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2141.888286] env[62730]: ERROR nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2141.888286] env[62730]: Faults: ['InvalidArgument'] [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] Traceback (most recent call last): [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] yield resources [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self.driver.spawn(context, instance, image_meta, [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self._fetch_image_if_missing(context, vi) [ 2141.888286] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] image_cache(vi, tmp_image_ds_loc) [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] vm_util.copy_virtual_disk( [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] session._wait_for_task(vmdk_copy_task) [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return self.wait_for_task(task_ref) [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return evt.wait() [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] result = hub.switch() [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2141.888617] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return self.greenlet.switch() [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self.f(*self.args, **self.kw) [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] raise exceptions.translate_fault(task_info.error) [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] Faults: ['InvalidArgument'] [ 2141.888920] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] [ 2141.888920] env[62730]: INFO nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Terminating instance [ 2141.890228] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2141.890440] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2141.890681] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98b75730-dbda-4d27-a53f-c370605d1884 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.893098] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2141.893297] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2141.894026] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf34eb4-a0e4-401e-b319-1d96a57a03f1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.901331] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2141.901550] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17cb37cf-4825-4fe0-88d9-ab7b6256957b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.903822] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2141.903998] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2141.904985] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-967b0473-4e82-4ff2-bc1f-03ea84634377 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.910499] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for the task: (returnval){ [ 2141.910499] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52a3f194-8d42-69d6-c6b8-12619b226378" [ 2141.910499] env[62730]: _type = "Task" [ 2141.910499] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.918411] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52a3f194-8d42-69d6-c6b8-12619b226378, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.979023] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2141.979285] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2141.979477] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Deleting the datastore file [datastore2] adc5639c-773e-4deb-9387-004833e94507 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2141.979762] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f849900-a55f-4ba7-8e09-19ccf23badfd {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.986805] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for the task: (returnval){ [ 2141.986805] env[62730]: value = "task-4837291" [ 2141.986805] env[62730]: _type = "Task" [ 2141.986805] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.995333] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Task: {'id': task-4837291, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.421710] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2142.422148] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Creating directory with path [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2142.422226] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a4bbd6d-d8fe-43ed-ad79-504021dbe5ea {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.433894] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Created directory with path [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2142.434112] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Fetch image to [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2142.434290] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2142.435073] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1fc5e3-7161-4e8d-96bd-13bb04501d9b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.442650] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2205ae28-161d-46de-aa44-e50dd1ecb5c7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.452310] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6002eab-d2a1-4f50-81cd-9ad21e871109 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.484453] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfe0ec5-4052-4bf6-9beb-5b6888959d7f {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.492362] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b54e388-ac07-40ef-b394-50a9bd195d65 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.496614] env[62730]: DEBUG oslo_vmware.api [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Task: {'id': task-4837291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071253} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.497181] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2142.497375] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2142.497573] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2142.497753] env[62730]: INFO nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Took 0.60 seconds to destroy the instance on the hypervisor. 
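The "Task: {'id': task-4837291, ...} progress is 0%" entry followed by "completed successfully ... 'duration_secs': 0.071253" is the client-side poll loop: oslo.vmware re-reads the vCenter task state on an interval until it reaches success or error. A sketch of that control flow under assumed names (fetch_task_info, TaskFailed, and the .state/.progress/.error fields are placeholders, not oslo.vmware's real API):

    import time

    class TaskFailed(Exception):
        """Stands in for the translated fault raised when a task errors."""

    def wait_for_task(fetch_task_info, interval=0.5):
        while True:
            info = fetch_task_info()     # one property-retrieval round trip
            if info.state == "success":
                return info              # caller can read e.g. duration_secs
            if info.state == "error":
                raise TaskFailed(info.error)
            print(f"progress is {info.progress}%")
            time.sleep(interval)         # back off, then poll again
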
[ 2142.499926] env[62730]: DEBUG nova.compute.claims [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2142.500107] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.500330] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.516234] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2142.632658] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2142.693639] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2142.693754] env[62730]: DEBUG oslo_vmware.rw_handles [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2142.719323] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2470305-f64c-46af-b516-811c150d1311 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.727873] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339383fb-782c-4ad6-bdfe-72df1e91823e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.758264] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d3c76-44bf-4ea6-9209-8b52441f933a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.766638] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fc03c3-c601-4cc3-ace2-eb69190ecd73 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.780668] env[62730]: DEBUG nova.compute.provider_tree [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2142.792117] env[62730]: DEBUG nova.scheduler.client.report [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2142.808211] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.808777] env[62730]: ERROR nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.808777] env[62730]: Faults: ['InvalidArgument'] [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] Traceback (most recent call last): [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2142.808777] env[62730]: 
ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self.driver.spawn(context, instance, image_meta, [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self._fetch_image_if_missing(context, vi) [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] image_cache(vi, tmp_image_ds_loc) [ 2142.808777] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] vm_util.copy_virtual_disk( [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] session._wait_for_task(vmdk_copy_task) [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return self.wait_for_task(task_ref) [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return evt.wait() [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] result = hub.switch() [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] return self.greenlet.switch() [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2142.809331] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] self.f(*self.args, **self.kw) [ 2142.809884] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2142.809884] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] raise exceptions.translate_fault(task_info.error) [ 2142.809884] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.809884] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] Faults: ['InvalidArgument'] [ 2142.809884] env[62730]: ERROR nova.compute.manager [instance: adc5639c-773e-4deb-9387-004833e94507] [ 2142.809884] env[62730]: DEBUG nova.compute.utils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2142.811385] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Build of instance adc5639c-773e-4deb-9387-004833e94507 was re-scheduled: A specified parameter was not correct: fileType [ 2142.811385] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2142.811803] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2142.811982] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2142.812175] env[62730]: DEBUG nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2142.812340] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2143.239761] env[62730]: DEBUG nova.network.neutron [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.256317] env[62730]: INFO nova.compute.manager [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Took 0.44 seconds to deallocate network for instance. [ 2143.372829] env[62730]: INFO nova.scheduler.client.report [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Deleted allocations for instance adc5639c-773e-4deb-9387-004833e94507 [ 2143.402871] env[62730]: DEBUG oslo_concurrency.lockutils [None req-9457d144-7f40-4a9a-8b83-d0abf99ed7a9 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 560.653s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.404028] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "adc5639c-773e-4deb-9387-004833e94507" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 428.295s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.404028] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: adc5639c-773e-4deb-9387-004833e94507] During sync_power_state the instance has a pending task (spawning). Skip. 
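The traceback and the "Build of instance ... was re-scheduled" entry show the failure policy at work: a spawn error (here the InvalidArgument fault from CopyVirtualDisk_Task) does not fail the request outright; the compute manager aborts the resource claim, cleans up networking, and hands the build back to the scheduler. A condensed sketch of that decision, with every name a placeholder rather than Nova's actual signature:

    def build_and_run(instance, spawn, abort_claim, deallocate_network, reschedule):
        try:
            spawn(instance)                    # driver.spawn in the trace above
        except Exception as exc:
            abort_claim(instance)              # release VCPU/RAM/disk claimed for the build
            deallocate_network(instance)       # drop Neutron allocations
            reschedule(instance, reason=exc)   # let the scheduler pick again
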
[ 2143.404028] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "adc5639c-773e-4deb-9387-004833e94507" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.404434] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 363.719s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.404434] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Acquiring lock "adc5639c-773e-4deb-9387-004833e94507-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.404850] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.404850] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.407157] env[62730]: INFO nova.compute.manager [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Terminating instance [ 2143.409659] env[62730]: DEBUG nova.compute.manager [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2143.409870] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2143.410199] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bc6e7b1-a936-4b9f-811b-8a5d4b4d3396 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.421184] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ebed28-1643-4878-a121-2d95474e2499 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.454726] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance adc5639c-773e-4deb-9387-004833e94507 could not be found. [ 2143.455054] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2143.455139] env[62730]: INFO nova.compute.manager [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] [instance: adc5639c-773e-4deb-9387-004833e94507] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2143.455411] env[62730]: DEBUG oslo.service.loopingcall [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2143.455713] env[62730]: DEBUG nova.compute.manager [-] [instance: adc5639c-773e-4deb-9387-004833e94507] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2143.455772] env[62730]: DEBUG nova.network.neutron [-] [instance: adc5639c-773e-4deb-9387-004833e94507] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2143.486870] env[62730]: DEBUG nova.network.neutron [-] [instance: adc5639c-773e-4deb-9387-004833e94507] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.495855] env[62730]: INFO nova.compute.manager [-] [instance: adc5639c-773e-4deb-9387-004833e94507] Took 0.04 seconds to deallocate network for instance. 
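[editor's note] The terminate path above is tolerant of a VM that is already gone: the WARNING about nova.exception.InstanceNotFound is immediately followed by "Instance destroyed" and normal network teardown. A hedged sketch of that control flow, with stand-in names (the exception class and driver object here are illustrative):

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_on_hypervisor(driver, instance_uuid):
    try:
        driver.destroy(instance_uuid)  # backend call; VM may not exist
    except InstanceNotFound as exc:
        # Log and fall through: teardown must still complete.
        LOG.warning('Instance does not exist on backend: %s', exc)
    # From here the instance is treated as destroyed, and deallocation
    # of network resources and placement allocations proceeds anyway.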
[ 2143.624790] env[62730]: DEBUG oslo_concurrency.lockutils [None req-8ca0d7d9-18ab-4fa5-922b-3aef7e2f1332 tempest-ServerPasswordTestJSON-1042750684 tempest-ServerPasswordTestJSON-1042750684-project-member] Lock "adc5639c-773e-4deb-9387-004833e94507" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2177.833069] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3d1dba12-a9db-4abf-8815-4374db5e85b6 tempest-ServerRescueTestJSONUnderV235-953149682 tempest-ServerRescueTestJSONUnderV235-953149682-project-member] Acquiring lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.733668] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.738667] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.733020] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.755347] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.755645] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.767536] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.767772] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.767943] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.768121] env[62730]: DEBUG nova.compute.resource_tracker [None 
req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2188.769267] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232d6784-d38d-4996-919b-f639a435b5ec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.778217] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870fca9e-e78a-44b3-ad79-a42f94f61ec2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.792079] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ac872d-d802-44d0-b2fd-73dbdc72557b {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.798456] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181e7c7d-7bc1-4652-a373-25423e0c8a13 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.826622] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180542MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2188.826737] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.826924] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.891046] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 4eeba36c-efe6-4050-953f-75669079a0e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891246] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891383] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891509] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891629] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891747] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.891867] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2188.892066] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2188.892227] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '18', 'num_instances': '7', 'num_vm_building': '7', 'num_task_deleting': '6', 'num_os_type_None': '7', 'num_proj_c9f07569d97748e88c6a7840147de664': '1', 'io_workload': '7', 'num_proj_861b7ee6cc2444678f4056271d23e872': '2', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1', 'num_task_spawning': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2188.985190] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaadf11-0e29-4297-814b-fab04b896ffe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.994354] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3076789a-5775-47f0-9fd4-3c513d0d0f64 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.026016] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fd1300-55f5-4488-b518-18f3801774f7 {{(pid=62730) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.033919] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6b79b2-02de-4723-992b-48b95c9e2bd3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.047724] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.057928] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2189.073389] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2189.073579] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.247s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.022620] env[62730]: WARNING oslo_vmware.rw_handles [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2190.022620] env[62730]: ERROR oslo_vmware.rw_handles [ 2190.023584] env[62730]: DEBUG nova.virt.vmwareapi.images [None 
req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2190.025080] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2190.025332] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Copying Virtual Disk [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/ab0826a1-ec84-4fba-8430-1665b023373c/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2190.025668] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6132a4a7-516e-44be-a3fa-a523e419ee07 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.033825] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for the task: (returnval){ [ 2190.033825] env[62730]: value = "task-4837292" [ 2190.033825] env[62730]: _type = "Task" [ 2190.033825] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.042573] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Task: {'id': task-4837292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.055161] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.055334] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2190.055441] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2190.073217] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073451] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073517] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073610] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073730] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073854] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.073976] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2190.074171] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2190.074609] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.543918] env[62730]: DEBUG oslo_vmware.exceptions [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2190.544253] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2190.544811] env[62730]: ERROR nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2190.544811] env[62730]: Faults: ['InvalidArgument'] [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Traceback (most recent call last): [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] yield resources [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self.driver.spawn(context, instance, image_meta, [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self._fetch_image_if_missing(context, vi) [ 2190.544811] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] image_cache(vi, tmp_image_ds_loc) [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] vm_util.copy_virtual_disk( [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] session._wait_for_task(vmdk_copy_task) [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return self.wait_for_task(task_ref) [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return evt.wait() [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] result = hub.switch() [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2190.545166] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return self.greenlet.switch() [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self.f(*self.args, **self.kw) [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] raise exceptions.translate_fault(task_info.error) [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Faults: ['InvalidArgument'] [ 2190.545482] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] [ 2190.545482] env[62730]: INFO nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Terminating instance [ 2190.546720] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.546932] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2190.547189] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17f1db99-05c0-43ff-8741-22fbe7053776 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.549541] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2190.549734] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2190.550453] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff946db1-9921-4c33-bc56-90afb473923c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.557152] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2190.557396] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad45fbf1-f477-407a-9a36-04bff1afcd7c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.559503] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2190.559683] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2190.560655] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b5414ab-2722-40ff-a621-9a68346f677d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.566641] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for the task: (returnval){ [ 2190.566641] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52618b73-e19e-eedd-d327-f30b1ce6c77c" [ 2190.566641] env[62730]: _type = "Task" [ 2190.566641] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.573701] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52618b73-e19e-eedd-d327-f30b1ce6c77c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.628444] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2190.628692] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2190.628830] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Deleting the datastore file [datastore2] 4eeba36c-efe6-4050-953f-75669079a0e0 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2190.629138] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e6fba1d-9a7c-4a7d-8066-d069f648d349 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.636733] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for the task: (returnval){ [ 2190.636733] env[62730]: value = "task-4837294" [ 2190.636733] env[62730]: _type = "Task" [ 2190.636733] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.645814] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Task: {'id': task-4837294, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.737560] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.076620] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2191.076967] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Creating directory with path [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2191.077109] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaab0b32-4fb3-45f6-a888-f196ebcf1342 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.089106] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Created directory with path [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2191.089327] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Fetch image to [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2191.089462] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2191.090274] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06574c6b-3efe-4981-b960-2e53a6539e7f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.097497] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ce5775-23e3-4c86-8c31-07803520da00 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.107356] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-50bab6b9-e0f5-4390-a474-9fc6241a3e52 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.140893] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e75d2fc-b6a7-4d8d-866e-6b35629bc4d7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.148437] env[62730]: DEBUG oslo_vmware.api [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Task: {'id': task-4837294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078695} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.149975] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2191.150179] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2191.150358] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2191.150535] env[62730]: INFO nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Took 0.60 seconds to destroy the instance on the hypervisor. 
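[editor's note] Each vCenter operation in this stretch (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) is asynchronous: the "Waiting for the task ... to complete" and "progress is 0%" records come from a poll loop over the task's state, which ends with the "completed successfully ... duration_secs" record above. A simplified, illustrative poller, not oslo.vmware's implementation; get_task_info is a placeholder for the real property query:

import time

def wait_for_task(get_task_info, interval=0.5):
    # Poll until the task leaves its queued/running states.
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(interval)

# Example: a task that reports success on the second poll.
states = iter([{'state': 'running'}, {'state': 'success', 'result': 42}])
assert wait_for_task(lambda: next(states), interval=0.01) == 42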
[ 2191.152454] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-705979cb-129b-4be4-bbc8-4b08f2c7db5a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.154339] env[62730]: DEBUG nova.compute.claims [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2191.154512] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.154727] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.180056] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2191.234366] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2191.295680] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2191.295873] env[62730]: DEBUG oslo_vmware.rw_handles [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2191.341045] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee57d35-ea6f-45a6-8eb2-c90b6b4ae6b3 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.348920] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e88144c-4a80-441c-ba3d-fbb5d4789eb1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.379959] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34878f8-f855-4228-b1e2-49811bb4d59c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.388612] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f63610-69a9-42f2-818a-71b41b400d63 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.403996] env[62730]: DEBUG nova.compute.provider_tree [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2191.415405] env[62730]: DEBUG nova.scheduler.client.report [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2191.434015] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.279s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.434642] env[62730]: ERROR nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2191.434642] env[62730]: Faults: ['InvalidArgument'] [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Traceback (most recent call last): [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 
4eeba36c-efe6-4050-953f-75669079a0e0] self.driver.spawn(context, instance, image_meta, [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self._fetch_image_if_missing(context, vi) [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] image_cache(vi, tmp_image_ds_loc) [ 2191.434642] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] vm_util.copy_virtual_disk( [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] session._wait_for_task(vmdk_copy_task) [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return self.wait_for_task(task_ref) [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return evt.wait() [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] result = hub.switch() [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] return self.greenlet.switch() [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2191.435091] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] self.f(*self.args, **self.kw) [ 2191.435391] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2191.435391] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] raise exceptions.translate_fault(task_info.error) [ 2191.435391] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2191.435391] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Faults: ['InvalidArgument'] [ 2191.435391] env[62730]: ERROR nova.compute.manager [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] [ 2191.435391] env[62730]: DEBUG nova.compute.utils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2191.436995] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Build of instance 4eeba36c-efe6-4050-953f-75669079a0e0 was re-scheduled: A specified parameter was not correct: fileType [ 2191.436995] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2191.437414] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2191.437591] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2191.437767] env[62730]: DEBUG nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2191.437932] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2191.737654] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.737843] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2191.768555] env[62730]: DEBUG nova.network.neutron [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2191.782480] env[62730]: INFO nova.compute.manager [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Took 0.34 seconds to deallocate network for instance. [ 2191.893534] env[62730]: INFO nova.scheduler.client.report [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Deleted allocations for instance 4eeba36c-efe6-4050-953f-75669079a0e0 [ 2191.920190] env[62730]: DEBUG oslo_concurrency.lockutils [None req-3a514549-66c7-4ed5-9faa-30078ade31d4 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 593.038s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.920427] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 476.812s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.920620] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] During sync_power_state the instance has a pending task (spawning). Skip. 
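[editor's note] The steady stream of "Running periodic task ComputeManager._..." records, including the _reclaim_queued_deletes skip when CONF.reclaim_instance_interval <= 0, is driven by oslo.service's periodic-task machinery. A self-contained sketch of that pattern; the manager class, option registration and task body are illustrative, not Nova's:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        # Same guard as in the log: a non-positive interval means skip.
        if CONF.reclaim_instance_interval <= 0:
            return

Manager().run_periodic_tasks(context=None)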
[ 2191.920797] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.921370] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 396.626s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.921599] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Acquiring lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.921813] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.921984] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.923977] env[62730]: INFO nova.compute.manager [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Terminating instance [ 2191.925687] env[62730]: DEBUG nova.compute.manager [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Start destroying the instance on the hypervisor.
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2191.925882] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2191.926149] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58c5dcfb-36f4-4947-9922-d0811fcf27d9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.935743] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d08b98e-570a-4faf-bf66-644805746845 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.969113] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4eeba36c-efe6-4050-953f-75669079a0e0 could not be found. [ 2191.969113] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2191.969113] env[62730]: INFO nova.compute.manager [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2191.969113] env[62730]: DEBUG oslo.service.loopingcall [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2191.969113] env[62730]: DEBUG nova.compute.manager [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2191.969356] env[62730]: DEBUG nova.network.neutron [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2192.012434] env[62730]: DEBUG nova.network.neutron [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.020768] env[62730]: INFO nova.compute.manager [-] [instance: 4eeba36c-efe6-4050-953f-75669079a0e0] Took 0.05 seconds to deallocate network for instance.
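
[editor's note] The destroy path above is deliberately tolerant of a VM that never materialized: SearchIndex.FindAllByUuid finds nothing, the driver logs InstanceNotFound as a warning, marks the instance destroyed anyway, and terminate continues into network deallocation. A sketch of that idempotent-teardown shape, with a stand-in exception class and hypothetical find_vm/unregister_vm callables (the real driver logic lives in nova/virt/vmwareapi/vmops.py):

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, find_vm, unregister_vm):
        # find_vm/unregister_vm are hypothetical backend callables, e.g.
        # wrappers around SearchIndex.FindAllByUuid and UnregisterVM.
        try:
            vm_ref = find_vm(instance_uuid)
        except InstanceNotFound:
            # Nothing ever reached the backend (the build failed before a
            # VM was created): report the destroy as successful so the
            # caller can proceed to deallocate networking.
            return
        unregister_vm(vm_ref)
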
[ 2192.118428] env[62730]: DEBUG oslo_concurrency.lockutils [None req-0983844b-be73-4bc7-b641-6f3c89bf5d13 tempest-ServersTestJSON-1465974173 tempest-ServersTestJSON-1465974173-project-member] Lock "4eeba36c-efe6-4050-953f-75669079a0e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.197s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.738680] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.319557] env[62730]: DEBUG oslo_concurrency.lockutils [None req-67f2c822-77d5-4690-8853-66944dc04062 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2238.531991] env[62730]: WARNING oslo_vmware.rw_handles [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2238.531991] env[62730]: ERROR oslo_vmware.rw_handles [ 2238.532623] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2238.534902] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Caching image {{(pid=62730) _fetch_image_if_missing
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2238.535739] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Copying Virtual Disk [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/288e9299-3648-4bc8-a37f-14bb159e7762/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2238.535739] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7daa9b18-6f84-4232-bf00-a138d3adbc7d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.544215] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for the task: (returnval){ [ 2238.544215] env[62730]: value = "task-4837295" [ 2238.544215] env[62730]: _type = "Task" [ 2238.544215] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.552517] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': task-4837295, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.054638] env[62730]: DEBUG oslo_vmware.exceptions [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2239.054944] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.055519] env[62730]: ERROR nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2239.055519] env[62730]: Faults: ['InvalidArgument'] [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Traceback (most recent call last): [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] yield resources [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self.driver.spawn(context, instance, image_meta, [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self._fetch_image_if_missing(context, vi) [ 2239.055519] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] image_cache(vi, tmp_image_ds_loc) [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] vm_util.copy_virtual_disk( [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] session._wait_for_task(vmdk_copy_task) [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return self.wait_for_task(task_ref) [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return evt.wait() [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] result = hub.switch() [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2239.055886] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return self.greenlet.switch() [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self.f(*self.args, **self.kw) [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] raise exceptions.translate_fault(task_info.error) [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Faults: ['InvalidArgument'] [ 2239.056210] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] [ 2239.056210] env[62730]: INFO nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Terminating instance [ 2239.057422] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.057634] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2239.057875] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1908485d-ae51-4de5-bd45-fae682539b96 {{(pid=62730) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.060248] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2239.060449] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2239.061174] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e08a3cc-c673-42ae-a79c-67d5f66385fe {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.068325] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2239.068542] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce43d9c1-f983-4cc9-90e6-f64b6d82cc0a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.070696] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2239.070875] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2239.071836] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-286797d0-77d9-4cfa-8146-57fe7f4482d2 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.077850] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2239.077850] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52661b6e-e9c7-107f-6388-36b9391a3dcc" [ 2239.077850] env[62730]: _type = "Task" [ 2239.077850] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.085669] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52661b6e-e9c7-107f-6388-36b9391a3dcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.138461] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2239.138683] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2239.138941] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Deleting the datastore file [datastore2] c1dcad10-0c5a-4aca-8870-42569cfd4448 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2239.139266] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1a7d7e4-63c9-47ae-8dca-1a91e30b82ae {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.146227] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for the task: (returnval){ [ 2239.146227] env[62730]: value = "task-4837297" [ 2239.146227] env[62730]: _type = "Task" [ 2239.146227] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.154504] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': task-4837297, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.590031] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2239.590031] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating directory with path [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2239.590438] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-877da378-2caa-47bd-bd5b-9d23e46e75dc {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.602548] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created directory with path [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2239.602759] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Fetch image to [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2239.602939] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2239.603727] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d7565b-4464-4ee8-abb4-e6f3f7c1e0d5 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.611121] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c249b663-d131-4812-9427-67ac34a138ce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.620400] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf718edf-539e-4164-a2fd-5eec8cc5b86e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.653326] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c6dd17-5706-49a6-8a64-7978cb50e5b4 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.661926] env[62730]: DEBUG oslo_vmware.api [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Task: {'id': task-4837297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077349} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.662135] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c51a1fcd-eec2-4cfe-aeb6-9bedbc0a0510 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.663807] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2239.663997] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2239.664191] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2239.664374] env[62730]: INFO nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Took 0.60 seconds to destroy the instance on the hypervisor. 
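
[editor's note] Every vSphere call in this log that returns a Task object (CopyVirtualDisk_Task, DeleteDatastoreFile_Task above) is completed by polling its TaskInfo until a terminal state; the tracebacks in this section show the error branch surfacing through exceptions.translate_fault at oslo_vmware/api.py:448. A simplified sketch of that poll loop, assuming a hypothetical get_task_info callable and using RuntimeError where oslo.vmware raises the translated VimFaultException:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() is assumed to return an object with .state
        # ('queued', 'running', 'success' or 'error'), .result and .error,
        # mirroring the vSphere TaskInfo structure that _poll_task inspects.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info.result
            if info.state == "error":
                # oslo.vmware translates info.error into a typed exception
                # here; 'Fault InvalidArgument not matched' above means it
                # fell back to the generic VimFaultException.
                raise RuntimeError(info.error)
            time.sleep(poll_interval)  # still queued/running: poll again
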
[ 2239.666583] env[62730]: DEBUG nova.compute.claims [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2239.666756] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.666967] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.684382] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2239.737978] env[62730]: DEBUG oslo_vmware.rw_handles [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2239.795832] env[62730]: DEBUG oslo_vmware.rw_handles [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2239.796031] env[62730]: DEBUG oslo_vmware.rw_handles [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2239.852556] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4572b8e-9b90-47bb-a792-a44c6677a2c0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.860108] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a76de4-5781-4e94-89e3-15701f959a60 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.892804] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde69920-6059-4f06-b901-2945e12d034f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.900523] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f93b77c-befa-47a2-acad-5c8addd74f08 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.914014] env[62730]: DEBUG nova.compute.provider_tree [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2239.924424] env[62730]: DEBUG nova.scheduler.client.report [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2239.938558] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.271s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.939103] env[62730]: ERROR nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2239.939103] env[62730]: Faults: ['InvalidArgument'] [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Traceback (most recent call last): [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2239.939103] env[62730]: ERROR 
nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self.driver.spawn(context, instance, image_meta, [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self._fetch_image_if_missing(context, vi) [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] image_cache(vi, tmp_image_ds_loc) [ 2239.939103] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] vm_util.copy_virtual_disk( [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] session._wait_for_task(vmdk_copy_task) [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return self.wait_for_task(task_ref) [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return evt.wait() [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] result = hub.switch() [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] return self.greenlet.switch() [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2239.939450] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] self.f(*self.args, **self.kw) [ 2239.939788] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2239.939788] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] raise exceptions.translate_fault(task_info.error) [ 2239.939788] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2239.939788] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Faults: ['InvalidArgument'] [ 2239.939788] env[62730]: ERROR nova.compute.manager [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] [ 2239.939788] env[62730]: DEBUG nova.compute.utils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2239.941301] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Build of instance c1dcad10-0c5a-4aca-8870-42569cfd4448 was re-scheduled: A specified parameter was not correct: fileType [ 2239.941301] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2239.941665] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2239.941843] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2239.942030] env[62730]: DEBUG nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2239.942202] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2240.257625] env[62730]: DEBUG nova.network.neutron [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2240.274996] env[62730]: INFO nova.compute.manager [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Took 0.33 seconds to deallocate network for instance. [ 2240.379911] env[62730]: INFO nova.scheduler.client.report [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Deleted allocations for instance c1dcad10-0c5a-4aca-8870-42569cfd4448 [ 2240.404545] env[62730]: DEBUG oslo_concurrency.lockutils [None req-e563b849-3470-4145-a2b9-20784857cc91 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 627.447s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2240.404844] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 431.786s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.405091] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Acquiring lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2240.405307] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2240.405479] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2240.407841] env[62730]: INFO nova.compute.manager [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Terminating instance [ 2240.409510] env[62730]: DEBUG nova.compute.manager [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2240.409593] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2240.410085] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0f1054f-7063-4182-b749-9e1be95a5e9a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.421120] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca96a43-e825-4f0b-8bd6-b0d67209ef62 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.450985] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1dcad10-0c5a-4aca-8870-42569cfd4448 could not be found. [ 2240.451232] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2240.451415] env[62730]: INFO nova.compute.manager [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2240.451678] env[62730]: DEBUG oslo.service.loopingcall [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2240.451911] env[62730]: DEBUG nova.compute.manager [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2240.452018] env[62730]: DEBUG nova.network.neutron [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2240.482963] env[62730]: DEBUG nova.network.neutron [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2240.493466] env[62730]: INFO nova.compute.manager [-] [instance: c1dcad10-0c5a-4aca-8870-42569cfd4448] Took 0.04 seconds to deallocate network for instance. [ 2240.586031] env[62730]: DEBUG oslo_concurrency.lockutils [None req-dbbbbb33-89c9-4914-8885-7c9fe9179838 tempest-DeleteServersTestJSON-1380569159 tempest-DeleteServersTestJSON-1380569159-project-member] Lock "c1dcad10-0c5a-4aca-8870-42569cfd4448" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.181s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.733334] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.738211] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2248.736694] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2248.749784] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.749784] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.749943] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.750126] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node:
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2248.751386] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eb911c-6663-468e-8b12-7e167061dc82 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.760561] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d7422a-4b31-4e7e-906d-bdaec004ea0c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.775189] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2bcb6e-c6fb-46c3-a860-c552492ae02a {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.782084] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac0f9cb-5d4e-4f9f-aae4-df61a3e5c7ef {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.812895] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180549MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2248.813271] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.813271] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.874863] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2248.875054] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2248.875198] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2248.875349] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2248.875476] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2248.875667] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2248.876015] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] stats={'failed_builds': '20', 'num_instances': '5', 'num_vm_building': '5', 'num_task_deleting': '5', 'num_os_type_None': '5', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '2', 'io_workload': '5', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2248.947835] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829530cd-efad-4b32-8042-63366177ae89 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.955823] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee95c0b2-a54b-4486-9e54-09edc2e04caa {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.985543] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e5f8ef-2e05-4476-a3f1-39f02b5abeb7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.993627] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bdf2db-2ac7-4d38-b073-0dde08d2a24c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.007454] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2249.016078] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2249.030807] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2249.031008] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.218s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.032516] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.737889] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.737269] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.737664] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2251.737664] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2251.752996] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2251.753178] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2251.753304] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2251.753436] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2251.753557] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2251.753703] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2251.754222] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.754412] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.754559] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2253.737678] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2285.060445] env[62730]: WARNING oslo_vmware.rw_handles [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection 
without response [ 2285.060445] env[62730]: ERROR oslo_vmware.rw_handles [ 2285.061128] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2285.062749] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2285.062998] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Copying Virtual Disk [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/ba352aae-d1a7-4037-85c3-1a5b8f98de9d/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2285.063314] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1f821eb-07d7-4f0a-8a42-de175678bca1 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.071706] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2285.071706] env[62730]: value = "task-4837298" [ 2285.071706] env[62730]: _type = "Task" [ 2285.071706] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.080497] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837298, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.583334] env[62730]: DEBUG oslo_vmware.exceptions [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2285.583665] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.584266] env[62730]: ERROR nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2285.584266] env[62730]: Faults: ['InvalidArgument'] [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Traceback (most recent call last): [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] yield resources [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self.driver.spawn(context, instance, image_meta, [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self._fetch_image_if_missing(context, vi) [ 2285.584266] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] image_cache(vi, tmp_image_ds_loc) [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] vm_util.copy_virtual_disk( [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] session._wait_for_task(vmdk_copy_task) [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return self.wait_for_task(task_ref) [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return evt.wait() [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] result = hub.switch() [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2285.584648] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return self.greenlet.switch() [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self.f(*self.args, **self.kw) [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] raise exceptions.translate_fault(task_info.error) [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Faults: ['InvalidArgument'] [ 2285.584970] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] [ 2285.584970] env[62730]: INFO nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Terminating instance [ 2285.586883] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2285.586883] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2285.586883] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b512f7a7-29fe-4656-9d7d-7b3345ac1f3d {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.589280] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2285.589447] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2285.589626] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2285.598798] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2285.599009] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2285.600446] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d7b9db0-5b02-4bc5-bf8f-70fc5db64e88 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.609976] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2285.609976] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5293661b-8255-1216-708f-410634e3ee32" [ 2285.609976] env[62730]: _type = "Task" [ 2285.609976] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.618848] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': session[5240f961-c89e-a67c-453d-ee41aa8bafbf]5293661b-8255-1216-708f-410634e3ee32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.651681] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance cache missing network info. 
{{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2285.724931] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.735859] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.736302] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2285.736502] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2285.737699] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71896c8-8b01-494d-9fcb-93b8bcb99a52 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.746200] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2285.746472] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-672c6954-42b8-4a0b-b71a-7db6e161a623 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.788694] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2285.788694] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2285.788694] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleting the datastore file [datastore2] c964b0fe-e985-4f24-a57d-3fa31e73e815 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2285.788909] env[62730]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-687d6edf-ea25-4e66-a71a-b591be4faf5e {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.795957] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2285.795957] env[62730]: value = "task-4837300" [ 2285.795957] env[62730]: _type = "Task" [ 2285.795957] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.805077] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.120884] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2286.121237] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating directory with path [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2286.122304] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-447abbec-41cd-4f55-9f1e-22a8452fa881 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.133358] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Created directory with path [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2286.133634] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Fetch image to [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2286.133892] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2286.134682] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3730b87e-5897-4bfa-bcfa-fa3a2a2b658c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.142223] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08183faf-46dd-4d4a-b6fd-fdce24c7d688 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.151991] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e528ac3-2bab-467b-af83-e1dbe837f227 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.184170] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72473eac-b549-4ad8-8ca0-ced6b556af70 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.190914] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6157584a-2092-4e94-98ec-fba530d9cb53 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.214304] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2286.266628] env[62730]: DEBUG oslo_vmware.rw_handles [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2286.326804] env[62730]: DEBUG oslo_vmware.rw_handles [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2286.326984] env[62730]: DEBUG oslo_vmware.rw_handles [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2286.331478] env[62730]: DEBUG oslo_vmware.api [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036127} completed successfully. 
{{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.331731] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2286.331916] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2286.332117] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2286.332288] env[62730]: INFO nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2286.332536] env[62730]: DEBUG oslo.service.loopingcall [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2286.332777] env[62730]: DEBUG nova.compute.manager [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2286.335063] env[62730]: DEBUG nova.compute.claims [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2286.335236] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.335452] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.465683] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94439c6-d158-48ce-b04f-34f1cd76c310 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.473411] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163fb6bf-3eec-4659-af75-c0b6d0f79e68 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.504262] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856e49e3-e67e-4f44-b450-75aa6ce9e731 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.512332] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69063d7f-d960-4407-bc33-2c73526e40d6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.527100] env[62730]: DEBUG nova.compute.provider_tree [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2286.537086] env[62730]: DEBUG nova.scheduler.client.report [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2286.550907] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 
tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.215s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.551462] env[62730]: ERROR nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2286.551462] env[62730]: Faults: ['InvalidArgument'] [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Traceback (most recent call last): [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self.driver.spawn(context, instance, image_meta, [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self._fetch_image_if_missing(context, vi) [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] image_cache(vi, tmp_image_ds_loc) [ 2286.551462] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] vm_util.copy_virtual_disk( [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] session._wait_for_task(vmdk_copy_task) [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return self.wait_for_task(task_ref) [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return evt.wait() [ 2286.551882] env[62730]: ERROR 
nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] result = hub.switch() [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] return self.greenlet.switch() [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2286.551882] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] self.f(*self.args, **self.kw) [ 2286.552244] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2286.552244] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] raise exceptions.translate_fault(task_info.error) [ 2286.552244] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2286.552244] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Faults: ['InvalidArgument'] [ 2286.552244] env[62730]: ERROR nova.compute.manager [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] [ 2286.552244] env[62730]: DEBUG nova.compute.utils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2286.553798] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Build of instance c964b0fe-e985-4f24-a57d-3fa31e73e815 was re-scheduled: A specified parameter was not correct: fileType [ 2286.553798] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2286.554180] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2286.554411] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.554597] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock 
"refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.554770] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2286.588064] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2286.660898] env[62730]: DEBUG nova.network.neutron [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.670142] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.670358] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2286.670544] env[62730]: DEBUG nova.compute.manager [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2286.763503] env[62730]: INFO nova.scheduler.client.report [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleted allocations for instance c964b0fe-e985-4f24-a57d-3fa31e73e815 [ 2286.786539] env[62730]: DEBUG oslo_concurrency.lockutils [None req-88412724-6454-4c94-8212-eaa2812cf731 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 553.215s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.786539] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 160.979s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.786757] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "c964b0fe-e985-4f24-a57d-3fa31e73e815-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.787829] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.787829] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.789074] env[62730]: INFO nova.compute.manager [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Terminating instance [ 2286.791126] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.791391] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" 
{{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.791697] env[62730]: DEBUG nova.network.neutron [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2286.824719] env[62730]: DEBUG nova.network.neutron [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2286.891759] env[62730]: DEBUG nova.network.neutron [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.900986] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-c964b0fe-e985-4f24-a57d-3fa31e73e815" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.901449] env[62730]: DEBUG nova.compute.manager [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2286.901655] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2286.902230] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf045e5c-d8fc-4c1f-a71b-ab9db73f96e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.912772] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b433a99-8849-44f3-8b1f-eb32283d4676 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.940436] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c964b0fe-e985-4f24-a57d-3fa31e73e815 could not be found. 
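Editor's note: the WARNING above shows the destroy path tolerating a VM that has already vanished from the backend — vmops raises nova.exception.InstanceNotFound, yet the entries that follow still report "Instance destroyed" and move on to network deallocation. Below is a minimal sketch of that idempotent-destroy pattern; the session helpers (find_vm_by_uuid and friends) are hypothetical stand-ins, not Nova's actual API.

```python
class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_instance(session, instance_uuid):
    """Tear down a backend VM, treating 'already gone' as success."""
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)   # hypothetical helpers,
        session.unregister_vm(vm_ref)                     # not Nova's real API
        session.delete_datastore_files(instance_uuid)
    except InstanceNotFound:
        # Nothing left to remove (e.g. a prior UnregisterVM already ran);
        # swallow the error so the caller can still report success, as the
        # WARNING -> "Instance destroyed" sequence in the log does.
        print(f"Instance {instance_uuid} does not exist on backend")
    print(f"Instance {instance_uuid} destroyed")
```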
[ 2286.940659] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2286.940842] env[62730]: INFO nova.compute.manager [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2286.941113] env[62730]: DEBUG oslo.service.loopingcall [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2286.941370] env[62730]: DEBUG nova.compute.manager [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2286.941468] env[62730]: DEBUG nova.network.neutron [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2286.979915] env[62730]: DEBUG nova.network.neutron [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2286.988010] env[62730]: DEBUG nova.network.neutron [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.999492] env[62730]: INFO nova.compute.manager [-] [instance: c964b0fe-e985-4f24-a57d-3fa31e73e815] Took 0.06 seconds to deallocate network for instance. 
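Editor's note: throughout this trace, long-running vCenter operations (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) are driven by the same poll-until-done loop — oslo.vmware's wait_for_task polls task state via a looping call, logs "progress is N%", and raises a translated fault on error, which is exactly how the InvalidArgument fileType fault above surfaced. The following is a simplified, self-contained stand-in for that pattern, not oslo.vmware's real implementation.

```python
import time

class TaskFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(get_task_info, task_id, interval=0.5):
    """Poll a task until it succeeds or fails, like the log's _poll_task."""
    while True:
        info = get_task_info(task_id)          # e.g. a PropertyCollector read
        if info["state"] == "running":
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        elif info["state"] == "success":
            return info.get("result")
        elif info["state"] == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            raise TaskFault(info["error"])
        time.sleep(interval)

# Canned task that completes on the third poll:
_states = iter([{"state": "running", "progress": 0},
                {"state": "running", "progress": 60},
                {"state": "success", "result": "task-4837300"}])
print(wait_for_task(lambda _id: next(_states), "task-4837300", interval=0.01))
```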
[ 2287.100474] env[62730]: DEBUG oslo_concurrency.lockutils [None req-2c7cff90-7ff1-487e-96e2-2c6f05185289 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "c964b0fe-e985-4f24-a57d-3fa31e73e815" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.314s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.028266] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2294.028712] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 2294.028712] env[62730]: value = "domain-c8" [ 2294.028712] env[62730]: _type = "ClusterComputeResource" [ 2294.028712] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2294.029791] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa674faa-7a98-4547-bef2-366ca8c42a41 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.042850] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 4 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2304.738753] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2304.739219] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.747056] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.733498] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.751091] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.751329] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances with incomplete migration {{(pid=62730) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2309.769535] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic 
[ 2309.769931] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2309.783181] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.783559] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.783849] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2309.784102] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2309.785272] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312a459c-b33c-497b-8064-8acb9b69ae3f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.794491] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b471d67-8f49-4676-b491-7ef6cdf4c826 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.808313] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd2a55c-01b5-457c-aa6a-0afa7ac798f7 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.814453] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b384f8-c403-46b8-b6c2-2d4e7f74e86f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.843779] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2309.843932] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.844173] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.014078] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 344fc477-d506-43bf-9fc7-e03889a43202 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2310.014292] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2310.014428] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2310.014558] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2310.014753] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2310.014946] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=100GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] stats={'failed_builds': '21', 'num_instances': '4', 'num_vm_building': '4', 'num_task_deleting': '4', 'num_os_type_None': '4', 'num_proj_3a2f02e8e5ce4988937c304a6e6858be': '1', 'io_workload': '4', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2310.032225] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing inventories for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2310.046328] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating ProviderTree inventory for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2310.046527] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Updating inventory in ProviderTree for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2310.058424] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing aggregate associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, aggregates: None {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2310.075678] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Refreshing trait associations for resource provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62730) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
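The inventory payload above is what the scheduler report client pushes into placement, and placement sizes the provider from it as (total - reserved) * allocation_ratio per resource class (the standard placement capacity formula). A worked computation with the exact values from the log entry above:

    # Inventory as reported for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7
    # (min_unit/max_unit/step_size omitted; they constrain request shape,
    # not total capacity).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity:g}")

    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 200 -- so the 4 VCPUs allocated
    # to the 4 instances above leave ample headroom at a 4.0 ratio.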
[ 2310.131627] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c32ca7-bfd7-4a51-b5e6-7d8607f14b84 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.140026] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d13f52-eae8-48c3-801c-430f1df10bce {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.169123] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f89f44-9e95-43cd-8c87-02027f46efcb {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.176269] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a3066d-bf24-4d55-b5b2-9088b3011392 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.189326] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2310.199640] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2310.214084] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2310.214319] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.370s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.182245] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.182637] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.182637] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [
2312.182827] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62730) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2312.738055] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.738261] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Starting heal instance info cache {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2312.738386] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Rebuilding the list of instances to heal {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2312.753920] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2312.754133] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2312.754278] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2312.754406] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5] Skipping network cache update for instance because it is Building. {{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2312.754529] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Didn't find any instances for network info cache update. 
{{(pid=62730) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2313.737613] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.004769] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.019533] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Getting list of instances from cluster (obj){ [ 2321.019533] env[62730]: value = "domain-c8" [ 2321.019533] env[62730]: _type = "ClusterComputeResource" [ 2321.019533] env[62730]: } {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2321.020922] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f8d53-ce7b-4c25-9f4a-cc9cde1fdfe6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.033872] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Got total of 4 instances {{(pid=62730) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2321.034065] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 344fc477-d506-43bf-9fc7-e03889a43202 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2321.034313] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 5b182a44-2add-42f6-913d-14c5379e76be {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2321.034527] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2321.034639] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Triggering sync for uuid 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 {{(pid=62730) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2321.034955] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "344fc477-d506-43bf-9fc7-e03889a43202" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.035237] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "5b182a44-2add-42f6-913d-14c5379e76be" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.035505] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b" by
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.035725] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "1b7fecbe-c43d-44cc-ad0f-bd3565023cd5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.738327] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.738964] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Cleaning up deleted instances {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2331.749504] env[62730]: DEBUG nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] There are 0 instances to clean {{(pid=62730) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2333.564121] env[62730]: WARNING oslo_vmware.rw_handles [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles response.begin() [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2333.564121] env[62730]: ERROR oslo_vmware.rw_handles [ 2333.564785] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Downloaded image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2333.566601] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] 
[instance: 344fc477-d506-43bf-9fc7-e03889a43202] Caching image {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2333.566860] env[62730]: DEBUG nova.virt.vmwareapi.vm_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Copying Virtual Disk [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk to [datastore2] vmware_temp/a8aca9d1-0878-43eb-bca1-4d624278dabc/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk {{(pid=62730) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2333.567184] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd33cb6b-7619-4433-b97f-70294e5d4605 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.576098] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2333.576098] env[62730]: value = "task-4837301" [ 2333.576098] env[62730]: _type = "Task" [ 2333.576098] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.584873] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837301, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.087828] env[62730]: DEBUG oslo_vmware.exceptions [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Fault InvalidArgument not matched. 
{{(pid=62730) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2334.088177] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2334.088823] env[62730]: ERROR nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.088823] env[62730]: Faults: ['InvalidArgument'] [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Traceback (most recent call last): [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] yield resources [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.driver.spawn(context, instance, image_meta, [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._fetch_image_if_missing(context, vi) [ 2334.088823] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] image_cache(vi, tmp_image_ds_loc) [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] vm_util.copy_virtual_disk( [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] session._wait_for_task(vmdk_copy_task) [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.wait_for_task(task_ref) [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return evt.wait() [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] result = hub.switch() [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2334.089197] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.greenlet.switch() [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.f(*self.args, **self.kw) [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] raise exceptions.translate_fault(task_info.error) [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Faults: ['InvalidArgument'] [ 2334.089513] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] [ 2334.089513] env[62730]: INFO nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Terminating instance [ 2334.092611] env[62730]: DEBUG oslo_concurrency.lockutils [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a46adab9-3ef5-4b2e-8d44-bab77576ed71/a46adab9-3ef5-4b2e-8d44-bab77576ed71.vmdk" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2334.092611] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.092611] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da523e14-c23d-4361-8b60-a741e9647b78 {{(pid=62730) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.093589] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2334.093773] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2334.094292] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2334.108739] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.108941] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62730) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2334.110087] env[62730]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0248fc7-0616-4774-8d34-e8352e118423 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.124166] env[62730]: DEBUG oslo_vmware.api [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Waiting for the task: (returnval){ [ 2334.124166] env[62730]: value = "session[5240f961-c89e-a67c-453d-ee41aa8bafbf]52088415-3c3d-5c6d-fc13-06f3d0ff6562" [ 2334.124166] env[62730]: _type = "Task" [ 2334.124166] env[62730]: } to complete. 
{{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.134193] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Preparing fetch location {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2334.134497] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Creating directory with path [datastore2] vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.134750] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c632b986-a162-4f32-bead-624f287b3da9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.151343] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2334.157459] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Created directory with path [datastore2] vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71 {{(pid=62730) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.157675] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Fetch image to [datastore2] vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk {{(pid=62730) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2334.157854] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to [datastore2] vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk on the data store datastore2 {{(pid=62730) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2334.158674] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18311f49-aed5-4c8a-b410-48bcab1b43d6 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.166764] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c59760-cc84-4772-9c96-20d3211516cf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.177585] env[62730]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fa8a98-a7bd-4c4d-aa86-d8d3e6195e93 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.211287] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a371ee2-8656-4ca9-8af5-383b42cb3baf {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.218378] env[62730]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9d2e8824-9975-45bc-b9b3-db932a85fd41 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.243283] env[62730]: DEBUG nova.virt.vmwareapi.images [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] [instance: 5b182a44-2add-42f6-913d-14c5379e76be] Downloading image file data a46adab9-3ef5-4b2e-8d44-bab77576ed71 to the data store datastore2 {{(pid=62730) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2334.295347] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.297496] env[62730]: DEBUG oslo_vmware.rw_handles [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2334.351462] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2334.351850] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Start destroying the instance on the hypervisor. 
{{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2334.352058] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2334.353232] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b1f886-5b4a-4740-b1ea-d7c649eb90e4 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.358096] env[62730]: DEBUG oslo_vmware.rw_handles [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Completed reading data from the image iterator. {{(pid=62730) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2334.358272] env[62730]: DEBUG oslo_vmware.rw_handles [None req-22581506-b9ee-4392-9de5-8a1a10eb7a23 tempest-ServerRescueTestJSON-1601816057 tempest-ServerRescueTestJSON-1601816057-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81c7212f-5786-46c1-9277-3c7d1b34531b/a46adab9-3ef5-4b2e-8d44-bab77576ed71/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62730) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2334.362164] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Unregistering the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2334.362409] env[62730]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e72e38e-762e-4191-b2d8-c79c2864b3f0 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.393012] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Unregistered the VM {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2334.393272] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Deleting contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2334.393450] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleting the datastore file [datastore2] 344fc477-d506-43bf-9fc7-e03889a43202 {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2334.393714] env[62730]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64ff6977-ba56-4316-a38a-1aa499e6eb0c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.400532] 
env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for the task: (returnval){ [ 2334.400532] env[62730]: value = "task-4837303" [ 2334.400532] env[62730]: _type = "Task" [ 2334.400532] env[62730]: } to complete. {{(pid=62730) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.408718] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837303, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.911547] env[62730]: DEBUG oslo_vmware.api [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Task: {'id': task-4837303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034108} completed successfully. {{(pid=62730) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.911936] env[62730]: DEBUG nova.virt.vmwareapi.ds_util [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleted the datastore file {{(pid=62730) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2334.912086] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Deleted contents of the VM from datastore datastore2 {{(pid=62730) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2334.912251] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2334.912426] env[62730]: INFO nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2334.912673] env[62730]: DEBUG oslo.service.loopingcall [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2334.912877] env[62730]: DEBUG nova.compute.manager [-] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network deallocation for instance since networking was not requested. {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}}
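The task-4837303 lines above show oslo.vmware's poller driving an asynchronous vCenter task from "progress is 0%" to "completed successfully". A hedged sketch of the same pattern outside Nova, assuming reachable vCenter credentials (the host, user, password, and datastore path below are placeholders, not values from this deployment):

    from oslo_vmware import api

    # api_retry_count and task_poll_interval are the knobs behind the
    # _poll_task DEBUG lines seen in this log.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Start an asynchronous datastore file deletion, then block while
    # oslo.vmware polls the task, raising a translated fault on error.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] some-instance-dir')
    session.wait_for_task(task)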
[ 2334.915207] env[62730]: DEBUG nova.compute.claims [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Aborting claim: {{(pid=62730) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2334.915392] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.915608] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.026778] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51a0752-70d8-4df1-85d8-ba285258505c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.034563] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e476b7ce-10b6-4550-a65e-2b72e8886207 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.065365] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af6d325-c0d3-4bda-baca-36897e7d50d9 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.072699] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7b0ba0-174b-40c4-aa8a-d7f620d758db {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.085674] env[62730]: DEBUG nova.compute.provider_tree [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2335.095290] env[62730]: DEBUG nova.scheduler.client.report [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2335.109314] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee
tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.194s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.109828] env[62730]: ERROR nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.109828] env[62730]: Faults: ['InvalidArgument'] [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Traceback (most recent call last): [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.driver.spawn(context, instance, image_meta, [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._fetch_image_if_missing(context, vi) [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] image_cache(vi, tmp_image_ds_loc) [ 2335.109828] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] vm_util.copy_virtual_disk( [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] session._wait_for_task(vmdk_copy_task) [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.wait_for_task(task_ref) [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return evt.wait() [ 2335.110157] env[62730]: ERROR 
nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] result = hub.switch() [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.greenlet.switch() [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2335.110157] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.f(*self.args, **self.kw) [ 2335.110635] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2335.110635] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] raise exceptions.translate_fault(task_info.error) [ 2335.110635] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.110635] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Faults: ['InvalidArgument'] [ 2335.110635] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] [ 2335.110635] env[62730]: DEBUG nova.compute.utils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] VimFaultException {{(pid=62730) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2335.111856] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Build of instance 344fc477-d506-43bf-9fc7-e03889a43202 was re-scheduled: A specified parameter was not correct: fileType [ 2335.111856] env[62730]: Faults: ['InvalidArgument'] {{(pid=62730) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2335.112251] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Unplugging VIFs for instance {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2335.112511] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2335.112665] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock 
"refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2335.112830] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2335.140493] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2335.215091] env[62730]: DEBUG nova.network.neutron [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.227619] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2335.227855] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62730) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2335.228105] env[62730]: DEBUG nova.compute.manager [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2335.327360] env[62730]: INFO nova.scheduler.client.report [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Deleted allocations for instance 344fc477-d506-43bf-9fc7-e03889a43202 [ 2335.349054] env[62730]: DEBUG oslo_concurrency.lockutils [None req-105c3069-bc68-4ad4-b252-e30524c7deee tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "344fc477-d506-43bf-9fc7-e03889a43202" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 601.648s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.349204] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "344fc477-d506-43bf-9fc7-e03889a43202" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 405.449s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.349352] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "344fc477-d506-43bf-9fc7-e03889a43202-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2335.349735] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "344fc477-d506-43bf-9fc7-e03889a43202-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.349883] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "344fc477-d506-43bf-9fc7-e03889a43202-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.352068] env[62730]: INFO nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Terminating instance [ 2335.354116] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquiring lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2335.354297] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Acquired lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202"
{{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2335.354907] env[62730]: DEBUG nova.network.neutron [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Building network info cache for instance {{(pid=62730) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2335.385446] env[62730]: DEBUG nova.network.neutron [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance cache missing network info. {{(pid=62730) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2335.472433] env[62730]: DEBUG nova.network.neutron [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Updating instance_info_cache with network_info: [] {{(pid=62730) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.481302] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Releasing lock "refresh_cache-344fc477-d506-43bf-9fc7-e03889a43202" {{(pid=62730) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2335.481707] env[62730]: DEBUG nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Start destroying the instance on the hypervisor. {{(pid=62730) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2335.481904] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Destroying instance {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2335.482441] env[62730]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95d805b9-b210-4c40-86f4-33d56d3f077f {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.492927] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f41ef7-486f-415e-af0f-ce45dfd569ca {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.520505] env[62730]: WARNING nova.virt.vmwareapi.vmops [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 344fc477-d506-43bf-9fc7-e03889a43202 could not be found. 
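The SearchIndex.FindAllByUuid invocation and the "Instance does not exist on backend" warning just above show the teardown path tolerating a VM that has already vanished from vCenter. A minimal sketch of that lookup pattern under oslo.vmware's public session API (the helper name find_vm_by_instance_uuid and the datacenter argument are illustrative, not nova's exact code):

```python
# Illustrative only: look up a VM by Nova instance UUID via the vSphere
# SearchIndex, treating "no result" as an already-deleted instance.

def find_vm_by_instance_uuid(session, datacenter_ref, instance_uuid):
    # SearchIndex.FindAllByUuid with instanceUuid=True matches the
    # "Invoking SearchIndex.FindAllByUuid" record in the log above.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid=instance_uuid, vmSearch=True, instanceUuid=True,
        datacenter=datacenter_ref)
    if not vm_refs:
        # Nova logs this case as "Instance does not exist on backend"
        # (InstanceNotFound) and carries on with the rest of teardown.
        return None
    return vm_refs[0]
```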
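Separately, the build failure that opened this section ("A specified parameter was not correct: fileType", fault InvalidArgument) surfaced through oslo.vmware's task polling: wait_for_task polls the server-side task and re-raises its fault as VimFaultException. A sketch of that call chain, assuming an established oslo.vmware session (paths and the copy_spec are placeholders):

```python
from oslo_vmware import exceptions as vexc


def copy_virtual_disk(session, source_path, dest_path, copy_spec=None):
    # Mirrors the shape of the traceback above: start CopyVirtualDisk_Task
    # on the VirtualDiskManager, then block on it; a fault recorded on the
    # task (here InvalidArgument on fileType) is raised by wait_for_task
    # as VimFaultException.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              disk_mgr, sourceName=source_path,
                              destName=dest_path, destSpec=copy_spec)
    try:
        return session.wait_for_task(task)
    except vexc.VimFaultException as err:
        # err.fault_list carries the fault names, e.g. ['InvalidArgument']
        # for the failure recorded at the top of this section.
        raise
```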
[ 2335.520730] env[62730]: DEBUG nova.virt.vmwareapi.vmops [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Instance destroyed {{(pid=62730) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2335.520878] env[62730]: INFO nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2335.521133] env[62730]: DEBUG oslo.service.loopingcall [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62730) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2335.521353] env[62730]: DEBUG nova.compute.manager [-] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Deallocating network for instance {{(pid=62730) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2335.521462] env[62730]: DEBUG nova.network.neutron [-] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] deallocate_for_instance() {{(pid=62730) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2335.648254] env[62730]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62730) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2335.648831] env[62730]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
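The 401 here is translated at nova's neutron wrapper layer rather than retried: a token rejection on an admin-scoped client is treated as a deployment problem and raised as NeutronAdminCredentialConfigurationInvalid, as the traceback below shows. A minimal sketch of that translation pattern (not nova's exact code; the decorator name and the simplified exception class are illustrative):

```python
import functools

from neutronclient.common import exceptions as neutron_exc


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Simplified stand-in for nova.exception's class of the same name."""


def translate_admin_unauthorized(func):
    # A 401 from a client built with admin credentials cannot be fixed by
    # re-authenticating, so it is surfaced as a configuration error
    # instead of being retried.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except neutron_exc.Unauthorized:
            raise NeutronAdminCredentialConfigurationInvalid(
                'Networking client is experiencing an unauthorized '
                'exception.')
    return wrapper
```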
[ 2335.649093] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-b42c2b75-de47-447c-8b7f-a0e50508539b'] [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2335.649093] env[62730]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649501] env[62730]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2335.649501] env[62730]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2335.649971] env[62730]: ERROR oslo.service.loopingcall [ 2335.650427] env[62730]: ERROR nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2335.678990] env[62730]: ERROR nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
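The "Dynamic interval looping call ... RetryDecorator.__call__.<locals>._func failed" record comes from oslo.service's RetryDecorator, which retries the decorated function only for the exception types listed in its exceptions argument; anything else propagates on the first attempt, which is why the credential error killed the loop immediately. A hedged sketch of that usage (the retried function and the transient-error class are illustrative):

```python
from oslo_service import loopingcall


class TransientNeutronError(Exception):
    """Hypothetical retryable error, for the sketch only."""


# max_retry_count, inc_sleep_time, max_sleep_time and exceptions are real
# RetryDecorator parameters; an exception type not in `exceptions` (such
# as NeutronAdminCredentialConfigurationInvalid above) is not retried.
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(TransientNeutronError,))
def deallocate_network_with_retries():
    ...  # would call network_api.deallocate_for_instance(...)
```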
[ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Traceback (most recent call last): [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] exception_handler_v20(status_code, error_body) [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] raise client_exc(message=error_message, [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Neutron server returns request_ids: ['req-b42c2b75-de47-447c-8b7f-a0e50508539b'] [ 2335.678990] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] During handling of the above exception, another exception occurred: [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Traceback (most recent call last): [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._delete_instance(context, instance, bdms) [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._shutdown_instance(context, instance, bdms) [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._try_deallocate_network(context, instance, requested_networks) [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] with excutils.save_and_reraise_exception(): [ 2335.679401] env[62730]: ERROR 
nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.679401] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.force_reraise() [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] raise self.value [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] _deallocate_network_with_retries() [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return evt.wait() [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] result = hub.switch() [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.greenlet.switch() [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2335.679774] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] result = func(*self.args, **self.kw) [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] result = f(*args, **kwargs) [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._deallocate_network( [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self.network_api.deallocate_for_instance( [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 
344fc477-d506-43bf-9fc7-e03889a43202] data = neutron.list_ports(**search_opts) [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.list('ports', self.ports_path, retrieve_all, [ 2335.680116] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] for r in self._pagination(collection, path, **params): [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] res = self.get(path, params=params) [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.retry_request("GET", action, body=body, [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2335.680472] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] return self.do_request(method, action, body=body, [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] ret = obj(*args, **kwargs) [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] self._handle_fault_response(status_code, replybody, resp) [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2335.680825] env[62730]: ERROR nova.compute.manager [instance: 344fc477-d506-43bf-9fc7-e03889a43202] [ 2335.709791] env[62730]: DEBUG oslo_concurrency.lockutils [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Lock "344fc477-d506-43bf-9fc7-e03889a43202" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.361s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.710968] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "344fc477-d506-43bf-9fc7-e03889a43202" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.676s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2335.711580] env[62730]: INFO nova.compute.manager [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] During sync_power_state the instance has a pending task (deleting). Skip. [ 2335.711782] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "344fc477-d506-43bf-9fc7-e03889a43202" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.766064] env[62730]: INFO nova.compute.manager [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] [instance: 344fc477-d506-43bf-9fc7-e03889a43202] Successfully reverted task state from None on failure for instance. [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server [None req-07a1cb67-dc09-4a6b-a703-406c31e5cd79 tempest-ServerShowV247Test-886662991 tempest-ServerShowV247Test-886662991-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-b42c2b75-de47-447c-8b7f-a0e50508539b'] [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2335.768758] env[62730]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.769303] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2335.769793] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server raise self.value [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2335.770286] env[62730]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2335.770860] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.770860] env[62730]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2335.771424] env[62730]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2335.771898] env[62730]: ERROR oslo_messaging.rpc.server [ 2364.744514] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2368.736711] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.737954] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.749566] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.749799] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.750046] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.750187] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62730) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2370.751775] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc96974e-a463-44b3-8d06-b69de56fd09d {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.760668] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe84e2d-88a0-4357-90f9-80e50fdd1e27 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.774719] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d434c-9258-4541-9d14-ab5de0251a5c {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.781595] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ffc2c5-dc6f-40ce-9f7e-a3eeb2a06eec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.812604] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Hypervisor/Node resource 
view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180527MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=62730) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2370.812822] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.812935] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.872892] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 5b182a44-2add-42f6-913d-14c5379e76be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2370.873101] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 8a2af5ae-9ae8-4ddf-bb95-c7a7ac45d28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2370.873239] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Instance 1b7fecbe-c43d-44cc-ad0f-bd3565023cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62730) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2370.873453] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2370.873613] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=100GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] stats={'failed_builds': '22', 'num_instances': '3', 'num_vm_building': '3', 'num_task_deleting': '3', 'num_os_type_None': '3', 'num_proj_292f9661bffa4d2a98d4d8df60a44534': '1', 'io_workload': '3', 'num_proj_f54137e1151d46fe9ba541e5e2bce843': '1', 'num_proj_861b7ee6cc2444678f4056271d23e872': '1'} {{(pid=62730) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2370.933962] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d0b6ea-feea-401c-bec2-5d6db6b0d494 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.942036] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de9a6cc-c791-468d-8e12-57eca44cfb69 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.971524] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc3ad9b-f977-446a-982a-37f2bcd5c4ec {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.979642] env[62730]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122666fc-7391-421f-aa71-7cd30b1b0394 {{(pid=62730) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.992987] env[62730]: DEBUG nova.compute.provider_tree [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed in ProviderTree for provider: 5ad8d442-72d6-4045-82dd-b3c7e74880a7 {{(pid=62730) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2371.001755] env[62730]: DEBUG nova.scheduler.client.report [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Inventory has not changed for provider 5ad8d442-72d6-4045-82dd-b3c7e74880a7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62730) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2371.017937] env[62730]: DEBUG nova.compute.resource_tracker [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62730) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2371.018249] env[62730]: DEBUG oslo_concurrency.lockutils [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.205s {{(pid=62730) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2372.018438] env[62730]: DEBUG oslo_service.periodic_task [None req-7a3e7af4-8b91-4ac8-8d65-f52d87cae100 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62730) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}