[ 606.243223] env[67424]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=67424) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.243562] env[67424]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=67424) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.243660] env[67424]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=67424) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.244086] env[67424]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 606.334834] env[67424]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=67424) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 606.345095] env[67424]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=67424) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 606.482813] env[67424]: INFO nova.virt.driver [None req-8fe0aae3-b703-4dbf-ac81-f50954674916 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 606.562454] env[67424]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 606.562626] env[67424]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 606.562821] env[67424]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=67424) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 609.757406] env[67424]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-0d259565-b3e0-4221-9efd-6e6a2c19cdce {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.773304] env[67424]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=67424) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 609.773445] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-5159cbec-e560-4c80-9711-c31f5d3bd7e2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.804925] env[67424]: INFO oslo_vmware.api [-] Successfully established new session; session ID is c0bd9.
[ 609.805099] env[67424]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.242s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 609.805582] env[67424]: INFO nova.virt.vmwareapi.driver [None req-8fe0aae3-b703-4dbf-ac81-f50954674916 None None] VMware vCenter version: 7.0.3
[ 609.809091] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eeddf57-65ad-4cc5-92a6-6bc447a0fa71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.826091] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa85131b-351c-4249-be24-d16ccd55f298 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.831989] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9d5b63-619c-4358-a5c3-60f204ae1f9f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.838586] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e93955-5a91-40be-8c0c-6b84e0799468 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.851534] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68946011-aad9-4f3e-9ea2-bab3450220a4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.857383] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266c10ac-ccc4-43d5-a590-0e8debe3ed32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.887884] env[67424]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-6080d839-dc14-4448-b5ba-1aa14169eb52 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.893011] env[67424]: DEBUG nova.virt.vmwareapi.driver [None req-8fe0aae3-b703-4dbf-ac81-f50954674916 None None] Extension org.openstack.compute already exists. {{(pid=67424) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 609.895642] env[67424]: INFO nova.compute.provider_config [None req-8fe0aae3-b703-4dbf-ac81-f50954674916 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 609.915036] env[67424]: DEBUG nova.context [None req-8fe0aae3-b703-4dbf-ac81-f50954674916 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),bcf5b536-de58-4cee-bf7a-0eb2c9b68198(cell1) {{(pid=67424) load_cells /opt/stack/nova/nova/context.py:464}}
[ 609.916191] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 609.916548] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 609.917336] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 609.917836] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Acquiring lock "bcf5b536-de58-4cee-bf7a-0eb2c9b68198" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 609.918109] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Lock "bcf5b536-de58-4cee-bf7a-0eb2c9b68198" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 609.919252] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Lock "bcf5b536-de58-4cee-bf7a-0eb2c9b68198" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 609.949649] env[67424]: INFO dbcounter [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Registered counter for database nova_cell0
[ 609.956063] env[67424]: INFO dbcounter [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Registered counter for database nova_cell1
[ 609.956367] env[67424]: DEBUG oslo_db.sqlalchemy.engines [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67424) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 609.957106] env[67424]: DEBUG oslo_db.sqlalchemy.engines [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67424) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 609.962942] env[67424]: DEBUG dbcounter [-] [67424] Writer thread running {{(pid=67424) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 609.963062] env[67424]: DEBUG dbcounter [-] [67424] Writer thread running {{(pid=67424) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 609.969156] env[67424]: ERROR nova.db.main.api [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 609.969156] env[67424]: result = function(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 609.969156] env[67424]: return func(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 609.969156] env[67424]: result = fn(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 609.969156] env[67424]: return f(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 609.969156] env[67424]: return db.service_get_minimum_version(context, binaries)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 609.969156] env[67424]: _check_db_access()
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 609.969156] env[67424]: stacktrace = ''.join(traceback.format_stack())
[ 609.969156] env[67424]:
[ 609.969156] env[67424]: ERROR nova.db.main.api [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 609.969156] env[67424]: result = function(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 609.969156] env[67424]: return func(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 609.969156] env[67424]: result = fn(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 609.969156] env[67424]: return f(*args, **kwargs)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 609.969156] env[67424]: return db.service_get_minimum_version(context, binaries)
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 609.969156] env[67424]: _check_db_access()
[ 609.969156] env[67424]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 609.969156] env[67424]: stacktrace = ''.join(traceback.format_stack())
[ 609.969156] env[67424]:
[ 609.970077] env[67424]: WARNING nova.objects.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 609.970077] env[67424]: WARNING nova.objects.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Failed to get minimum service version for cell bcf5b536-de58-4cee-bf7a-0eb2c9b68198
[ 609.970077] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Acquiring lock "singleton_lock" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 609.970077] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Acquired lock "singleton_lock" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 609.970077] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Releasing lock "singleton_lock" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 609.970077] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Full set of CONF: {{(pid=67424) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 609.970244] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ******************************************************************************** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 609.970420] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] Configuration options gathered from: {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 609.970707] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 609.970985] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 609.971214] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ================================================================================ {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 609.972088] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] allow_resize_to_same_host = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.972337] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] arq_binding_timeout = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.972493] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] backdoor_port = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.972631] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] backdoor_socket = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.972805] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] block_device_allocate_retries = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.972971] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] block_device_allocate_retries_interval = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973164] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cert = self.pem {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973342] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973514] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute_monitors = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973683] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] config_dir = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973860] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] config_drive_format = iso9660 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.973995] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.974177] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] config_source = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.974346] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] console_host = devstack {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.974513] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] control_exchange = nova {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.974675] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cpu_allocation_ratio = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.974838] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] daemon = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975012] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] debug = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975185] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_access_ip_network_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975353] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_availability_zone = nova {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975509] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_ephemeral_format = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975668] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_green_pool_size = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.975904] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976085] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] default_schedule_zone = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976247] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] disk_allocation_ratio = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976408] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] enable_new_services = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976586] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] enabled_apis = ['osapi_compute'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976750] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] enabled_ssl_apis = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.976911] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] flat_injected = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977080] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] force_config_drive = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977242] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] force_raw_images = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977408] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] graceful_shutdown_timeout = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977567] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] heal_instance_info_cache_interval = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977787] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] host = cpu-1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.977966] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.978148] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] initial_disk_allocation_ratio = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.978311] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] initial_ram_allocation_ratio = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979060] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979248] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_build_timeout = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979417] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_delete_interval = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979596] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_format = [instance: %(uuid)s] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979798] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_name_template = instance-%08x {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.979970] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_usage_audit = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.980199] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_usage_audit_period = month {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.980338] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.980509] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] instances_path = /opt/stack/data/nova/instances {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.980678] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] internal_service_availability_zone = internal {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.980840] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] key = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981037] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] live_migration_retry_count = 30 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981227] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_config_append = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981403] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981570] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_dir = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981757] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.981904] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_options = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982084] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_rotate_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982261] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_rotate_interval_type = days {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982433] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] log_rotation_type = none {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982564] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982693] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.982867] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983047] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983182] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983345] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] long_rpc_timeout = 1800 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983504] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_concurrent_builds = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983664] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_concurrent_live_migrations = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983824] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_concurrent_snapshots = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.983980] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_local_block_devices = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984151] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_logfile_count = 30 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984308] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] max_logfile_size_mb = 200 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984463] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] maximum_instance_delete_attempts = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984632] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metadata_listen = 0.0.0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984799] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metadata_listen_port = 8775 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.984969] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metadata_workers = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.985142] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] migrate_max_retries = -1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.985312] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] mkisofs_cmd = genisoimage {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.985523] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] my_block_storage_ip = 10.180.1.21 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.985658] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] my_ip = 10.180.1.21 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.985825] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] network_allocate_retries = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986009] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986186] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] osapi_compute_listen = 0.0.0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986351] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] osapi_compute_listen_port = 8774 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986523] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] osapi_compute_unique_server_name_scope = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986693] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] osapi_compute_workers = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.986857] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] password_length = 12 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987030] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] periodic_enable = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987197] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] periodic_fuzzy_delay = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987369] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] pointer_model = usbtablet {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987538] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] preallocate_images = none {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987701] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] publish_errors = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987837] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] pybasedir = /opt/stack/nova {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.987992] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ram_allocation_ratio = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.988209] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rate_limit_burst = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.988333] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rate_limit_except_level = CRITICAL {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.988496] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rate_limit_interval = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.988687] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reboot_timeout = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.988848] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reclaim_instance_interval = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989034] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] record = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989211] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reimage_timeout_per_gb = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989381] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] report_interval = 120 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989544] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rescue_timeout = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989731] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reserved_host_cpus = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.989901] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reserved_host_disk_mb = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990075] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reserved_host_memory_mb = 512 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990239] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] reserved_huge_pages = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990400] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] resize_confirm_window = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990562] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] resize_fs_using_block_device = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990721] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] resume_guests_state_on_host_boot = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.990893] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991103] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] rpc_response_timeout = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991279] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] run_external_periodic_tasks = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991452] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] running_deleted_instance_action = reap {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991619] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] running_deleted_instance_poll_interval = 1800 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991783] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] running_deleted_instance_timeout = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.991944] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler_instance_sync_interval = 120 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992130] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_down_time = 720 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992301] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] servicegroup_driver = db {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992462] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] shelved_offload_time = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992622] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] shelved_poll_interval = 3600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992794] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] shutdown_timeout = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.992959] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] source_is_ipv6 = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.993135] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ssl_only = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.993388] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.993558] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] sync_power_state_interval = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.993723] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] sync_power_state_pool_size = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.993927] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] syslog_log_facility = LOG_USER {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994113] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] tempdir = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994282] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] timeout_nbd = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994456] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] transport_url = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994622] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] update_resources_interval = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994787] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_cow_images = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.994948] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_eventlog = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995125] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_journal = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995290] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_json = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995452] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_rootwrap_daemon = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995612] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_stderr = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995774] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] use_syslog = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.995933] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vcpu_pin_set = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996114] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plugging_is_fatal = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996284] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plugging_timeout = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996451] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] virt_mkfs = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996611] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] volume_usage_poll_interval = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996769] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] watch_log_file = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.996940] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] web = /usr/share/spice-html5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 609.997139] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_concurrency.disable_process_locking = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.997443] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.997627] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.997796] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.997972] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.998159] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.998328] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.998511] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.auth_strategy = keystone {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.998708] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.compute_link_prefix = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.998889] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999083] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.dhcp_domain = novalocal {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999250] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.enable_instance_password = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999416] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.glance_link_prefix = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999581] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999787] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 609.999958] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.instance_list_per_project_cells = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.000138] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.list_records_by_skipping_down_cells = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.000305] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.local_metadata_per_cell = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.000475] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.max_limit = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.000643] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.metadata_cache_expiration = 15 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.000823] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.neutron_default_tenant_id = default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001025] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.use_neutron_default_nets = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001219] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001388] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001558] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001735] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.001909] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_dynamic_targets = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002089] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_jsonfile_path = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002277] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002471] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.backend = dogpile.cache.memcached {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002637] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.backend_argument = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002806] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.config_prefix = cache.oslo {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.002973] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.dead_timeout = 60.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003147] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.debug_cache_backend = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003309] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.enable_retry_client = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003470] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.enable_socket_keepalive = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003639] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.enabled = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003802] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.enforce_fips_mode = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.003961] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.expiration_time = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004134] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.hashclient_retry_attempts = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004299] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.hashclient_retry_delay = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004457] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_dead_retry = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004615] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_password = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004778] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.004943] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_pool_maxsize = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005286] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005447] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_sasl_enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005627] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005814] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_socket_timeout = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.005995] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.memcache_username = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.006180] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.proxies = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.006340] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_password = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.006512] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_sentinel_service_name = mymaster {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.006688] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.006859] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_server = localhost:6379 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.007032] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_socket_timeout = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.007198] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.redis_username = None {{(pid=67424) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.007365] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.retry_attempts = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.007556] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.retry_delay = 0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.007693] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.socket_keepalive_count = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.007855] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.socket_keepalive_idle = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008020] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.socket_keepalive_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008182] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.tls_allowed_ciphers = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008339] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.tls_cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008497] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.tls_certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008690] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.tls_enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.008850] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cache.tls_keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009035] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009218] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.auth_type = password {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009383] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009560] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.catalog_info = volumev3::publicURL {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009746] env[67424]: DEBUG oslo_service.service 
[None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.009920] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.010095] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.cross_az_attach = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.010260] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.debug = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.endpoint_template = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.http_retries = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.os_region_name = RegionOne {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012730] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cinder.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.cpu_dedicated_set = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.cpu_shared_set = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.image_type_exclude_list = [] {{(pid=67424) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.012934] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013146] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.max_concurrent_disk_ops = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013146] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.max_disk_devices_to_attach = -1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013146] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013146] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013146] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.resource_provider_association_refresh = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013286] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.shutdown_retry_interval = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013443] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013585] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] conductor.workers = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013760] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] console.allowed_origins = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.013924] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] console.ssl_ciphers = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014112] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] console.ssl_minimum_version = default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014285] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] consoleauth.enforce_session_timeout = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014452] env[67424]: DEBUG oslo_service.service [None 
req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] consoleauth.token_ttl = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014621] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014781] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.014944] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015121] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015281] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015436] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015788] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015788] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.015951] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016140] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016303] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.region_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016465] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016625] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016799] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.service_type = accelerator {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.016961] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017133] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017292] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017495] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017623] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017821] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] cyborg.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.017956] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.backend = sqlalchemy {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.018144] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.connection = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.018311] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.connection_debug = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.018519] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.connection_parameters = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.018667] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.connection_recycle_time = 3600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.018857] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.connection_trace = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.019031] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.db_inc_retry_interval = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.019200] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.db_max_retries = 20 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
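Every record in this dump comes from oslo.config's ConfigOpts.log_opt_values(), the oslo_config/cfg.py:2620 frame cited on each line: at startup the service walks every registered option group and logs one "<group>.<option> = <value>" line at DEBUG, and options declared with secret=True are masked as '****' (which is why database.connection, api_database.connection, key_manager.fixed_key, and the memcache/redis passwords above and below print as asterisks). A minimal, self-contained sketch of that mechanism follows; the two options in it are illustrative stand-ins mirroring values seen in this log, not Nova's real definitions, which live under nova/conf/.

    import logging

    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF

    # Illustrative stand-ins for two of the options dumped in this log.
    CONF.register_opts(
        [
            cfg.StrOpt('backend', default='dogpile.cache.memcached'),
            cfg.StrOpt('memcache_password', secret=True),  # dumped as ****
        ],
        group='cache',
    )

    logging.basicConfig(level=logging.DEBUG)
    CONF([])  # parse an (empty) command line so the option values are usable
    CONF.log_opt_values(LOG, logging.DEBUG)
    # emits, among its bookkeeping lines:
    #   cache.backend = dogpile.cache.memcached
    #   cache.memcache_password = ****

The dump itself is purely informational and runs once as the service bootstraps; each <group> prefix corresponds to a [group] section in nova.conf, so these lines double as a record of the effective configuration the process started with.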
[ 610.019406] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.db_max_retry_interval = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.019524] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.db_retry_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.019710] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.max_overflow = 50 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.019886] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.max_pool_size = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020060] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.max_retries = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020238] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020399] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.mysql_wsrep_sync_wait = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020559] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.pool_timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020722] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.retry_interval = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.020883] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.slave_connection = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021087] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.sqlite_synchronous = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021266] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] database.use_db_reconnect = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021445] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.backend = sqlalchemy {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021613] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.connection = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021803] env[67424]: DEBUG oslo_service.service [None 
req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.connection_debug = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.021984] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.connection_parameters = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022163] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.connection_recycle_time = 3600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022327] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.connection_trace = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022491] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.db_inc_retry_interval = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022654] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.db_max_retries = 20 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022817] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.db_max_retry_interval = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.022979] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.db_retry_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023156] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.max_overflow = 50 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023320] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.max_pool_size = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023481] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.max_retries = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023652] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023814] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.023969] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.pool_timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024141] env[67424]: DEBUG oslo_service.service [None 
req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.retry_interval = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024301] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.slave_connection = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024459] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] api_database.sqlite_synchronous = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024633] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] devices.enabled_mdev_types = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024814] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.024985] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ephemeral_storage_encryption.default_format = luks {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025161] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ephemeral_storage_encryption.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025322] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025487] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.api_servers = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025653] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025817] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.025980] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026154] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026313] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026474] env[67424]: DEBUG oslo_service.service [None 
req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.debug = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026638] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.default_trusted_certificate_ids = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026802] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.enable_certificate_validation = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.026964] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.enable_rbd_download = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.027137] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.027306] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.027468] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.027628] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.027809] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028015] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.num_retries = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028204] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.rbd_ceph_conf = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028371] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.rbd_connect_timeout = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028569] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.rbd_pool = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028767] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.rbd_user = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.028940] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.region_name = None {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.029133] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.029398] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.029657] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.service_type = image {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.029888] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.030120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.030338] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.030557] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.030802] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.031044] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.verify_glance_signatures = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.031299] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] glance.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.031556] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] guestfs.debug = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.031814] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] mks.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.032316] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.032585] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.manager_interval = 2400 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
610.032829] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.precache_concurrency = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.033088] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.remove_unused_base_images = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.033338] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.033580] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.033864] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] image_cache.subdirectory_name = _base {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.034164] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.api_max_retries = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.034453] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.api_retry_interval = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.034735] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.035032] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.auth_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.035320] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.035604] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.035885] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.036181] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.conductor_group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.036463] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.036736] env[67424]: DEBUG 
oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.037015] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.037302] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.037572] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.037836] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.038119] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.038408] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.peer_list = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.038688] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.region_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.038963] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.039249] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.serial_console_state_timeout = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.039526] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.039828] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.service_type = baremetal {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.040118] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.shard = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.040390] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.040646] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.status_code_retries = None {{(pid=67424) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.040902] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.041169] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.041453] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.041710] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ironic.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.041981] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.042256] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] key_manager.fixed_key = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.042531] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.042765] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.barbican_api_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.042992] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.barbican_endpoint = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.043248] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.barbican_endpoint_type = public {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.043474] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.barbican_region_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.043697] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.043920] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.044159] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.collect_timing = False {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.044407] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.044655] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.044913] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.number_of_retries = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.045176] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.retry_delay = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.045431] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.send_service_user_token = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.045685] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.045931] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.046203] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.verify_ssl = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.046448] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican.verify_ssl_path = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.046701] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.046949] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.auth_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.047205] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.047449] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.047692] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.047938] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.048187] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.048427] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.048686] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] barbican_service_user.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.048904] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.approle_role_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.049162] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.approle_secret_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.049399] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.049623] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.049850] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.050082] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.050306] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.050539] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.kv_mountpoint = secret {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.050761] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.kv_path = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.051009] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.kv_version = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.051276] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.namespace = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.051526] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.root_token_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.051777] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.052034] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.ssl_ca_crt_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.052284] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.052544] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.use_ssl = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.052843] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.053130] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.053403] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.auth_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.053646] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.053882] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054084] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054257] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054421] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054582] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054746] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.054906] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055077] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055239] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055398] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.region_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055554] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055711] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.055883] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.service_type = identity {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056056] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056219] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056378] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056556] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056714] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.056873] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] keystone.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057088] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.connection_uri = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057255] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_mode = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057422] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_model_extra_flags = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057591] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_models = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057765] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_power_governor_high = performance {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.057936] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_power_governor_low = powersave {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058115] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_power_management = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058289] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058458] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.device_detach_attempts = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058633] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.device_detach_timeout = 20 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058804] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.disk_cachemodes = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.058969] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.disk_prefix = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059144] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.enabled_perf_events = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059308] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.file_backed_memory = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059472] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.gid_maps = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059632] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.hw_disk_discard = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059790] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.hw_machine_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.059963] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_rbd_ceph_conf = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.060145] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.060311] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.060481] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_rbd_glance_store_name = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.060653] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_rbd_pool = rbd {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.060825] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_type = default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061008] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.images_volume_group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061196] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.inject_key = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061363] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.inject_partition = -2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061560] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.inject_password = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061728] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.iscsi_iface = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.061892] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.iser_use_multipath = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062068] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_bandwidth = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062237] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062401] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_downtime = 500 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062565] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062731] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.062894] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_inbound_addr = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.063312] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.063495] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_permit_post_copy = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.063660] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_scheme = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064097] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_timeout_action = abort {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064289] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_tunnelled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064460] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_uri = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064630] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.live_migration_with_native_tls = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064796] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.max_queues = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.064964] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.065216] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.065389] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.nfs_mount_options = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.065693] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.065872] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066053] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_iser_scan_tries = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066223] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_memory_encrypted_guests = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066392] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066555] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_pcie_ports = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066725] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.num_volume_scan_tries = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.066890] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.pmem_namespaces = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.067062] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.quobyte_client_cfg = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.067359] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.067532] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rbd_connect_timeout = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.067699] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.067866] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068038] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rbd_secret_uuid = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068205] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rbd_user = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068368] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068541] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.remote_filesystem_transport = ssh {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068727] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rescue_image_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.068895] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rescue_kernel_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.069063] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rescue_ramdisk_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.069238] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.069400] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.rx_queue_size = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.069571] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.smbfs_mount_options = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.069846] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070031] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.snapshot_compression = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070197] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.snapshot_image_format = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070416] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070582] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.sparse_logical_volumes = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070745] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.swtpm_enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.070917] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.swtpm_group = tss {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071128] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.swtpm_user = tss {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071307] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.sysinfo_serial = unique {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071489] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.tb_cache_size = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071657] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.tx_queue_size = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071826] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.uid_maps = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.071989] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.use_virtio_for_bridges = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.072173] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.virt_type = kvm {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.072342] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.volume_clear = zero {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.072504] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.volume_clear_size = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.072670] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.volume_use_multipath = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.072831] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_cache_path = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073012] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073184] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_mount_group = qemu {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073343] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_mount_opts = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073511] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073784] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.073962] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.vzstorage_mount_user = stack {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074140] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074313] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074489] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.auth_type = password {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074652] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074815] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.074980] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075152] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075323] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075495] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.default_floating_pool = public {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075655] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075822] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.extension_sync_interval = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.075982] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.http_retries = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076161] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076323] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076482] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076654] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076815] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.076983] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.ovs_bridge = br-int {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077159] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.physnets = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077331] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.region_name = RegionOne {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077490] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077659] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.service_metadata_proxy = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077823] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.077991] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.service_type = network {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.078167] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.078328] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.078484] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.078689] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.078857] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079030] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] neutron.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079208] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] notifications.bdms_in_notifications = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079386] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] notifications.default_level = INFO {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079564] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] notifications.notification_format = unversioned {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079749] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] notifications.notify_on_state_change = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.079937] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080128] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] pci.alias = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080301] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] pci.device_spec = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080467] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] pci.report_in_placement = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080640] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080818] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.auth_type = password {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.080998] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081181] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081340] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081502] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081661] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081821] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.081975] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.default_domain_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082148] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.default_domain_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082303] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.domain_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082460] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.domain_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082615] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082775] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.082933] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083099] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083259] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083426] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.password = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083588] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.project_domain_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083755] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.project_domain_name = Default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.083926] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.project_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084110] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.project_name = service {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084284] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.region_name = RegionOne {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084445] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084604] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084774] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.service_type = placement {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.084940] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085111] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085277] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085434] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.system_scope = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085590] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085749] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.trust_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.085906] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.user_domain_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086085] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.user_domain_name = Default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086250] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.user_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086422] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.username = placement {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086598] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086778] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] placement.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.086963] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.cores = 20 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087142] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.count_usage_from_placement = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087315] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087486] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.injected_file_content_bytes = 10240 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087653] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.injected_file_path_length = 255 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087820] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.injected_files = 5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.087986] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.instances = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.088164] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.key_pairs = 100 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.088331] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.metadata_items = 128 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.088496] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.ram = 51200 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.088683] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.recheck_quota = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.088868] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.server_group_members = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089047] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] quota.server_groups = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089259] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089388] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089552] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.image_metadata_prefilter = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089747] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.089937] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.max_attempts = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090130] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.max_placement_results = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090290] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090449] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.query_placement_for_image_type_support = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090609] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090784] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] scheduler.workers = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.090964] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091147] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091326] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091497] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091667] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091827] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.091989] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.092189] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.092357] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.host_subset_size = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.092518] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.092676] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.092843] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093016] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.isolated_hosts = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093189] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.isolated_images = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093350] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093510] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093681] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.093885] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.pci_in_placement = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094069] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094234] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094401] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094590] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094724] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.094886] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095060] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.track_instance_changes = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095240] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095408] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metrics.required = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095571] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metrics.weight_multiplier = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095733] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.095899] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] metrics.weight_setting = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.096232] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.096411] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.096590] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.port_range = 10000:20000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.096765] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.096938] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097121] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] serial_console.serialproxy_port = 6083 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097295] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097474] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.auth_type = password {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097667] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097802] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.097967] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 610.098144] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.insecure = False {{(pid=67424)
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.098304] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.098478] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.send_service_user_token = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.098659] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.098832] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] service_user.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.099015] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.agent_enabled = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.099210] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.099532] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.099760] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.099951] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.html5proxy_port = 6082 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100133] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.image_compression = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100342] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.jpeg_compression = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100457] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.playback_compression = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100631] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.server_listen = 127.0.0.1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100799] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.100959] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.streaming_mode = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101129] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] spice.zlib_compression = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101296] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] upgrade_levels.baseapi = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101467] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] upgrade_levels.compute = auto {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101628] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] upgrade_levels.conductor = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101788] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] upgrade_levels.scheduler = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.101954] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102128] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.auth_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102288] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102449] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102606] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102768] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.102928] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.keyfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103103] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103263] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vendordata_dynamic_auth.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103436] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.api_retry_count = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103597] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.ca_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103770] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.cache_prefix = devstack-image-cache {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.103942] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.cluster_name = testcl1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.104120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.connection_pool_size = 10 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.104281] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.console_delay_seconds = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.104450] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.datastore_regex = ^datastore.* {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.104662] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.104842] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.host_password = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105012] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.host_port = 443 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105190] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.host_username = administrator@vsphere.local {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105361] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.insecure = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105523] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.integration_bridge = None {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105686] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.maximum_objects = 100 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.105846] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.pbm_default_policy = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106015] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.pbm_enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106180] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.pbm_wsdl_location = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106350] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106508] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.serial_port_proxy_uri = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106668] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.serial_port_service_uri = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.106837] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.task_poll_interval = 0.5 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107022] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.use_linked_clone = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107196] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.vnc_keymap = en-us {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107365] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.vnc_port = 5900 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107528] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vmware.vnc_port_total = 10000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107714] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.auth_schemes = ['none'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.107888] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.108195] env[67424]: 
DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.108385] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.108559] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.novncproxy_port = 6080 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.108764] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.server_listen = 127.0.0.1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.108949] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.109125] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.vencrypt_ca_certs = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.109288] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.vencrypt_client_cert = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.109457] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vnc.vencrypt_client_key = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.109628] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.109828] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_deep_image_inspection = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110008] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110179] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110340] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110555] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.disable_rootwrap = False {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110660] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.enable_numa_live_migration = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110824] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.110982] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111153] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111313] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.libvirt_disable_apic = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111470] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111628] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111789] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.111948] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.112120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.112281] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.112439] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.112602] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
610.112788] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.112973] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113148] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113329] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.client_socket_timeout = 900 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113496] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.default_pool_size = 1000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113661] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.keep_alive = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113828] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.max_header_line = 16384 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.113990] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.secure_proxy_ssl_header = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114162] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.ssl_ca_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114321] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.ssl_cert_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114479] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.ssl_key_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114639] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.tcp_keepidle = 600 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114814] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.114980] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] zvm.ca_file = None {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.115152] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] zvm.cloud_connector_url = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.115440] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.115613] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] zvm.reachable_timeout = 300 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.115792] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.enforce_new_defaults = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.115966] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.enforce_scope = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.116156] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.policy_default_rule = default {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.116338] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.116514] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.policy_file = policy.yaml {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.116687] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.116850] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117024] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117179] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117342] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117510] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117684] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.117872] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.connection_string = messaging:// {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118053] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.enabled = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118228] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.es_doc_type = notification {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118390] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.es_scroll_size = 10000 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118559] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.es_scroll_time = 2m {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118750] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.filter_error_trace = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.118940] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.hmac_keys = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.119125] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.sentinel_service_name = mymaster {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.119292] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.socket_timeout = 0.1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.119455] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.trace_requests = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.119623] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler.trace_sqlalchemy = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.119836] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler_jaeger.process_tags = {} {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.120217] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.120403] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] profiler_otlp.service_name_prefix = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.120573] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] remote_debug.host = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.120740] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] remote_debug.port = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.120929] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121098] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121266] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121428] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121590] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121750] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.121914] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122089] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122255] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122427] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122588] env[67424]: 
DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122784] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.122961] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123144] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123316] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123483] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123645] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123823] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.123980] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124153] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124317] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124482] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124643] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124809] env[67424]: DEBUG oslo_service.service [None 
req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.124970] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125142] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125304] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125463] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125628] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125792] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.125964] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.126192] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.126386] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.126566] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.126738] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.ssl_version = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.126904] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127105] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127274] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_notifications.retry = -1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127454] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127631] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_messaging_notifications.transport_url = **** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127802] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.auth_section = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.127965] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.auth_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128137] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.cafile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128295] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.certfile = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128456] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.collect_timing = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128611] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.connect_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128792] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.connect_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.128956] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.endpoint_id = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129126] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.endpoint_override = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129288] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.insecure = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129443] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.keyfile = None {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129606] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.max_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129774] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.min_version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.129975] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.region_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130164] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.retriable_status_codes = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130324] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.service_name = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130480] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.service_type = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130640] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.split_loggers = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130798] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.status_code_retries = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.130954] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.status_code_retry_delay = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.timeout = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131275] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.valid_interfaces = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131430] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_limit.version = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131592] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_reports.file_event_handler = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131751] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=67424) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.131907] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] oslo_reports.log_dir = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132085] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132245] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132398] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132565] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132726] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.132908] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133131] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133298] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133457] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133622] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133786] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.133946] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] vif_plug_ovs_privileged.user = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134128] env[67424]: DEBUG oslo_service.service 
[None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.flat_interface = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134310] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134752] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134752] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134822] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.134973] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.135157] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.135323] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.135501] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.135674] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.isolate_vif = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.135848] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.136018] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.136192] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.136362] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.ovsdb_interface = native {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
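The option dump above is the standard oslo.config startup listing that every OpenStack service emits through log_opt_values(), one line per effective option value. A minimal sketch of how such options get registered and dumped (the option names mirror the log, but the definitions here are illustrative, not Nova's actual ones):

import logging

from oslo_config import cfg

# Illustrative option definitions modeled on the vif_plug_ovs_privileged
# group seen in the dump above; defaults are assumptions for the sketch.
opts = [
    cfg.IntOpt('thread_pool_size', default=8,
               help='Worker threads for the privsep daemon.'),
    cfg.StrOpt('logger_name', default='oslo_privsep.daemon'),
    cfg.StrOpt('user', default=None),
]

CONF = cfg.CONF
CONF.register_opts(opts, group='vif_plug_ovs_privileged')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([])  # parse an empty command line for the sketch
CONF.log_opt_values(LOG, logging.DEBUG)  # emits one DEBUG line per option

This is why unset options print literally as "user = None" and defaults such as "thread_pool_size = 8" appear verbatim in the lines above.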
[ 610.136525] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_vif_ovs.per_port_bridge = False {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.136693] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_brick.lock_path = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.136858] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137029] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137203] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.capabilities = [21] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137364] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137521] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.helper_command = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137739] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.137924] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.138099] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] privsep_osbrick.user = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.138275] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.138433] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.group = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.138589] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.helper_command = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.138780] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
610.138949] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.139120] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] nova_sys_admin.user = None {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 610.139249] env[67424]: DEBUG oslo_service.service [None req-b273c879-41fa-4e17-981d-4e8520e894d0 None None] ******************************************************************************** {{(pid=67424) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 610.140024] env[67424]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 610.149946] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Getting list of instances from cluster (obj){ [ 610.149946] env[67424]: value = "domain-c8" [ 610.149946] env[67424]: _type = "ClusterComputeResource" [ 610.149946] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 610.151213] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1af036-173f-43db-b6f0-cafc7350cafe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.161186] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Got total of 0 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 610.161755] env[67424]: WARNING nova.virt.vmwareapi.driver [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 610.162268] env[67424]: INFO nova.virt.node [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Generated node identity b21acede-6243-4c82-934a-a3956380220f [ 610.162479] env[67424]: INFO nova.virt.node [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Wrote node identity b21acede-6243-4c82-934a-a3956380220f to /opt/stack/data/n-cpu-1/compute_id [ 610.181641] env[67424]: WARNING nova.compute.manager [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Compute nodes ['b21acede-6243-4c82-934a-a3956380220f'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 610.220112] env[67424]: INFO nova.compute.manager [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 610.245757] env[67424]: WARNING nova.compute.manager [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
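The nova.virt.node lines above show the service generating a stable node identity and persisting it to /opt/stack/data/n-cpu-1/compute_id so restarts reuse the same UUID; the ComputeHostNotFound warnings that follow are expected on a first start, before any compute node record exists. A rough sketch of that read-or-generate step (the helper name and error handling are assumptions, not Nova's exact code):

import os
import uuid


def get_local_node_uuid(state_path: str) -> uuid.UUID:
    """Return this node's persistent identity, creating it on first start."""
    id_file = os.path.join(state_path, 'compute_id')
    try:
        with open(id_file) as f:
            return uuid.UUID(f.read().strip())
    except FileNotFoundError:
        node_id = uuid.uuid4()  # e.g. b21acede-6243-4c82-934a-a3956380220f
        with open(id_file, 'w') as f:
            f.write(str(node_id))
        return node_id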
[ 610.246367] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 610.246367] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 610.246367] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.246589] env[67424]: DEBUG nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 610.247621] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4f7c8e-ad8c-4c41-bed6-e317b9096125 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.255992] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082e2574-aa33-4fd0-a32a-bcb339d89760 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.273240] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893b8ca2-d760-42ab-a802-8fccb5a550fb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.279705] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a7d7f8-8be8-4c65-ba47-5a15be5bd64f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.309863] env[67424]: DEBUG nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181021MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 610.310052] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 610.310222] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
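The Acquiring/acquired/released triples around clean_compute_node_cache and _update_available_resource above come from oslo.concurrency, which logs wait and hold times around every critical section guarded by the named lock. A minimal sketch of the pattern (Nova wraps this in its own synchronized helper; the class and method here are illustrative):

from oslo_concurrency import lockutils


class ResourceTrackerSketch:
    @lockutils.synchronized('compute_resources')
    def update_available_resource(self, nodename):
        # Everything in this body runs under the in-process lock named
        # "compute_resources"; oslo.concurrency logs the acquire/release
        # lines seen above, including how long the lock was held.
        ...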
[ 610.325289] env[67424]: WARNING nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] No compute node record for cpu-1:b21acede-6243-4c82-934a-a3956380220f: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host b21acede-6243-4c82-934a-a3956380220f could not be found.
[ 610.337683] env[67424]: INFO nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: b21acede-6243-4c82-934a-a3956380220f
[ 610.391681] env[67424]: DEBUG nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 610.391871] env[67424]: DEBUG nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 610.498881] env[67424]: INFO nova.scheduler.client.report [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] [req-c6d6aca7-deac-44d9-8a5b-ab112750a537] Created resource provider record via placement API for resource provider with UUID b21acede-6243-4c82-934a-a3956380220f and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 610.516743] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388826d7-0af0-4fe9-bd46-2abdd9d9b1c9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.525019] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40885a6f-b020-491f-996e-0f487282605a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.555129] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8683df-56e2-4b21-9e1d-8a786b682ae1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.562706] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65721eb1-3c1f-4fad-8ac3-ff1de7e7d51f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.576826] env[67424]: DEBUG nova.compute.provider_tree [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 610.617446] env[67424]: DEBUG nova.scheduler.client.report [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Updated inventory for provider b21acede-6243-4c82-934a-a3956380220f with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}}
[ 610.617697] env[67424]: DEBUG nova.compute.provider_tree [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 0 to 1 during operation: update_inventory {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 610.617842] env[67424]: DEBUG nova.compute.provider_tree [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 610.665675] env[67424]: DEBUG nova.compute.provider_tree [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 1 to 2 during operation: update_traits {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 610.684773] env[67424]: DEBUG nova.compute.resource_tracker [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 610.684966] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.375s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.685147] env[67424]: DEBUG nova.service [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Creating RPC server for service compute {{(pid=67424) start /opt/stack/nova/nova/service.py:182}}
[ 610.698318] env[67424]: DEBUG nova.service [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] Join ServiceGroup membership for this service compute {{(pid=67424) start /opt/stack/nova/nova/service.py:199}}
[ 610.698501] env[67424]: DEBUG nova.servicegroup.drivers.db [None req-b511068a-a5fc-4b82-9dd6-c16bf5b8d0d5 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=67424) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 619.964612] env[67424]: DEBUG dbcounter [-] [67424] Writing DB stats nova_cell0:SELECT=1 {{(pid=67424) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 619.965432] env[67424]: DEBUG dbcounter [-] [67424] Writing DB stats nova_cell1:SELECT=1 {{(pid=67424) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
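The inventory dictionaries above are the payload the resource tracker pushes to the Placement API: each resource class carries total, reserved, min_unit/max_unit, step_size, and allocation_ratio, and every accepted update bumps the provider generation (here 0 -> 1 -> 2). A sketch of how such a payload could be assembled from the hypervisor view; the field derivation is an assumption for illustration, not Nova's exact code:

# Build a Placement-style inventory dict from raw hypervisor capacity.
def build_inventory(vcpus, memory_mb, disk_gb, max_unit_vcpu, max_unit_mb,
                    max_unit_gb, cpu_ratio=4.0, ram_ratio=1.0,
                    disk_ratio=1.0, reserved_mb=512):
    def entry(total, reserved, max_unit, ratio):
        return {'total': total, 'reserved': reserved, 'min_unit': 1,
                'max_unit': max_unit, 'step_size': 1,
                'allocation_ratio': ratio}
    return {
        'VCPU': entry(vcpus, 0, max_unit_vcpu, cpu_ratio),
        'MEMORY_MB': entry(memory_mb, reserved_mb, max_unit_mb, ram_ratio),
        'DISK_GB': entry(disk_gb, 0, max_unit_gb, disk_ratio),
    }


# Values matching the log: 48 vCPUs at ratio 4.0, 196590 MB RAM with
# 512 MB reserved, 400 GB of datastore capacity.
inventory = build_inventory(48, 196590, 400, 16, 65530, 126)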
[ 634.700970] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 634.711840] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){
[ 634.711840] env[67424]: value = "domain-c8"
[ 634.711840] env[67424]: _type = "ClusterComputeResource"
[ 634.711840] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 634.712935] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f713d3b5-0eca-44dc-bcc6-1d5d90513c25 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 634.721615] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 0 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 634.721839] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 634.722171] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){
[ 634.722171] env[67424]: value = "domain-c8"
[ 634.722171] env[67424]: _type = "ClusterComputeResource"
[ 634.722171] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 634.723020] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055567f6-82ce-4e06-a2fd-356f7ee325d5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 634.730622] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 0 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
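_sync_power_states and _cleanup_running_deleted_instances above are oslo.service periodic tasks: decorated methods on a PeriodicTasks subclass that run_periodic_tasks invokes on a timer. A minimal sketch of the pattern (the spacing value and method body are illustrative, not Nova's configuration):

from oslo_config import cfg
from oslo_service import periodic_task


class ComputeManagerSketch(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=600)
    def _sync_power_states(self, context):
        # Reconcile the driver's view of VM power states with the DB,
        # as the task logged above does for cluster domain-c8.
        pass


manager = ComputeManagerSketch()
manager.run_periodic_tasks(context=None)  # normally driven by a timer loop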
[ 653.942151] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 653.942618] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 653.970477] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 654.096833] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 654.098770] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 654.100829] env[67424]: INFO nova.compute.claims [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 654.223434] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d92199-2ec4-4889-9cd3-80968420e500 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.233818] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688c87fb-9f5d-46a3-be10-ec1bd0d1821e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.267603] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8804ff-273b-423d-a38f-7abfd2676c09 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.277421] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1882f44b-69c1-4a11-8650-659452d2f96f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.293183] env[67424]: DEBUG nova.compute.provider_tree [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 654.304113] env[67424]: DEBUG nova.scheduler.client.report [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 654.325380] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f
tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.228s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.328215] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 654.411654] env[67424]: DEBUG nova.compute.utils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 654.416059] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 654.416666] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 654.440160] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 654.546874] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 654.675865] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "ef935349-cb7c-4aaa-a735-a010501c5ed4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.676293] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "ef935349-cb7c-4aaa-a735-a010501c5ed4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.697534] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 654.791494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.791764] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.793314] env[67424]: INFO nova.compute.claims [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.925813] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), 
allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 654.926117] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 654.926264] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 654.926455] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 654.926599] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 654.926791] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 654.926960] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 654.927178] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 654.927618] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 654.927849] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 654.928100] env[67424]: DEBUG nova.virt.hardware [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
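The hardware.py lines above are a CPU topology search: with no flavor or image limits set, the limits default to 65536, and for a 1-vCPU m1.nano the only valid factorization is sockets=1, cores=1, threads=1. A simplified sketch of that enumeration (not Nova's exact algorithm):

# Enumerate every (sockets, cores, threads) factorization of the vCPU
# count that fits the given limits, as the "Got 1 possible topologies"
# line above reports for a single vCPU.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)


print(list(possible_topologies(1)))  # [(1, 1, 1)]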
[ 654.932047] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535b4e18-33b9-4e22-ab8a-b05adfe92443 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.943050] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b970b8e-08cd-4717-be56-10b801170268 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 654.965592] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8b8225-70e0-466a-a6a1-9aee04755345 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.003372] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "0786e0b6-8cc3-4577-b1b0-d01a8361666e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 655.004061] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "0786e0b6-8cc3-4577-b1b0-d01a8361666e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 655.018981] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 655.042438] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541e239f-073e-4a10-97e8-084d1c843b8e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.051419] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8934ed4e-7d93-478f-94ba-19a4d14059fe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.086598] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe562b6-93df-4eda-9f40-bce79772ccb4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.105579] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c5f850-5317-4202-8533-3e9a40ba4780 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.123086] env[67424]: DEBUG nova.compute.provider_tree [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.125345] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.143616] env[67424]: DEBUG nova.policy [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5d1a664b6bda4886aa8e1907fa47343f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1cdda45146654ae89298886a4c347472', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 655.146565] env[67424]: DEBUG nova.scheduler.client.report [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 655.174292] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.382s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.174904] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 655.177454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.052s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.178860] env[67424]: INFO nova.compute.claims [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.224525] env[67424]: DEBUG nova.compute.utils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.225993] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 655.226996] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 655.239648] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Start building block device mappings for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 655.326022] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3f76cb-a0ab-4e05-b69a-3ae9b3063dcc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.332415] env[67424]: DEBUG nova.policy [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b60d85e24bb14f5c8fdd4a4a610dcaee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '849b2bdca65b4c4ca62478339dba3db1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 655.345131] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 655.350287] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15d50dc-14af-4926-8893-dc06f14cc45e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.385694] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e34b22-6c2c-4bc5-bd53-2705f4516149 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.394456] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a439aa3-917e-4e71-9482-ae855e40b73a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.400865] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.401107] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.402765] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.402765] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.402765] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.402765] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.402765] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.402949] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.402949] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.402949] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.402949] env[67424]: DEBUG nova.virt.hardware [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.403309] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45113aaa-2969-4c88-abc3-d5178370159d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.416829] 
env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.431116] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa53274-316c-4387-b6b3-ac721e585624 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.436014] env[67424]: DEBUG nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 655.460054] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.460623] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 655.526339] env[67424]: DEBUG nova.compute.utils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.527680] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Not allocating networking since 'none' was specified. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 655.545837] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Start building block device mappings for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 655.632023] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 655.666517] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.667200] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.667290] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.667508] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.667936] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.667936] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.668169] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 655.668406] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.668951] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.669932] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.671271] env[67424]: DEBUG nova.virt.hardware [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.671443] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a09924-7a1b-4db0-8c07-9418f489165f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.681968] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c233fc1-e437-475b-8f3d-691896cdd5d9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.696703] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Instance VIF info [] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.708296] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 655.709029] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5acbab24-4ea2-46a4-ac3c-d26ddc363d0c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.723031] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Created folder: OpenStack in parent group-v4. [ 655.723031] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating folder: Project (3cfe09f8f17b4a84a9f020776d683283). Parent ref: group-v639843. 
{{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 655.723031] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fb66460-43c1-47aa-b7cc-9a89ed41c6ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.735630] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Created folder: Project (3cfe09f8f17b4a84a9f020776d683283) in parent group-v639843. [ 655.735690] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating folder: Instances. Parent ref: group-v639844. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 655.736116] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6735195a-8835-49ba-ad44-411e2ae7751f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.745631] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Created folder: Instances in parent group-v639844. [ 655.745972] env[67424]: DEBUG oslo.service.loopingcall [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.746245] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 655.746592] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1feaff2-0277-4491-b9bf-e10485118dd2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.769319] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.769319] env[67424]: value = "task-3199886" [ 655.769319] env[67424]: _type = "Task" [ 655.769319] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.778331] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.113686] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "eae4bf0e-4a44-4929-92f7-e4f4b6966187" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.113996] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "eae4bf0e-4a44-4929-92f7-e4f4b6966187" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.129321] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 656.202847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.202847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.204443] env[67424]: INFO nova.compute.claims [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.287953] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199886, 'name': CreateVM_Task, 'duration_secs': 0.301963} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.288303] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 656.291016] env[67424]: DEBUG oslo_vmware.service [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2598b2a7-3edf-4939-bcd9-8df6dcf7d78e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.301336] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.301336] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.307504] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 656.307504] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-394581ec-d6fa-4aba-a3d6-7c7961354bec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.311010] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for the task: (returnval){ [ 656.311010] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52078338-26fb-54bc-da12-1928fdf53e9f" [ 656.311010] env[67424]: _type = "Task" [ 656.311010] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.325555] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52078338-26fb-54bc-da12-1928fdf53e9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.347094] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f1e07b-6bc3-4146-a55e-6f182cfa20fa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.356643] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1b5b8d-a81c-4379-a99d-768a5aec28ca {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.400376] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48735af7-5545-4bec-af37-08bb6e23f892 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.409826] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5792406a-8dd3-47c7-82bb-df670b27f98b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.425352] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.444978] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Successfully created port: 998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.449680] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 656.452846] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Successfully created port: 7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.472370] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.472881] env[67424]: DEBUG 
nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 656.552889] env[67424]: DEBUG nova.compute.utils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.556017] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Not allocating networking since 'none' was specified. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 656.576821] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 656.699015] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 656.739376] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.739642] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.739793] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.739968] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d 
tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.740164] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.740272] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.740470] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.740623] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 656.740804] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.741850] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.741850] env[67424]: DEBUG nova.virt.hardware [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.743569] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381eb5cd-0f08-4942-94fc-bfd879fd3160 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.753814] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf9635b-828e-4215-a3e7-e8344e48a2f0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.773623] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Instance VIF info [] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.782893] env[67424]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Creating folder: Project (4aeed8be23914a47884ad12e9805b71d). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.785378] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5db05b1-cc86-4b3f-9805-eeace60a3997 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.800253] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Created folder: Project (4aeed8be23914a47884ad12e9805b71d) in parent group-v639843. [ 656.800253] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Creating folder: Instances. Parent ref: group-v639847. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.800388] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ece4f01e-8642-441f-8c9f-ec683796b53e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.812013] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Created folder: Instances in parent group-v639847. [ 656.812013] env[67424]: DEBUG oslo.service.loopingcall [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.812216] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 656.812456] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6df253c-dc13-4388-ac94-1d1636cfcff9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.849531] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.851231] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.851231] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.851231] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.851565] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.851688] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.851688] env[67424]: value = "task-3199889" [ 656.851688] env[67424]: _type = "Task" [ 656.851688] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.852376] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-494bd2d9-480f-4555-a538-1196501c225d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.872843] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199889, 'name': CreateVM_Task} progress is 6%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.878953] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.878953] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 656.879513] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6c0cff-edd8-4857-944a-4dd1cae5bb17 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.888477] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe474d87-e5ac-4c30-b844-b4eba86cf67b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.897668] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for the task: (returnval){ [ 656.897668] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d44361-5a15-b5c0-c039-cf43c273e7f9" [ 656.897668] env[67424]: _type = "Task" [ 656.897668] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.906289] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d44361-5a15-b5c0-c039-cf43c273e7f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.071064] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.071307] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.081601] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 657.158319] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.158577] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.160434] env[67424]: INFO nova.compute.claims [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.301270] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5502113e-ad40-472c-ba27-2d467d2152fd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.311313] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9b823c-a7b2-4cca-a3f1-cf0f6d8b991e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.345254] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f665dbb-84ea-4521-b770-bf1fcf710380 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.353168] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-500b5e82-185d-4872-984b-ba04cc64d478 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.375082] env[67424]: DEBUG nova.compute.provider_tree [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.378256] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199889, 'name': CreateVM_Task, 'duration_secs': 0.299293} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.378672] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 657.379135] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.379354] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.380888] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.380888] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bec5e019-6c81-4574-8dc3-766ae1a66f9b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.388354] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for the task: (returnval){ [ 657.388354] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b4dba6-1e53-e510-fc25-e0cda1c2d64a" [ 657.388354] env[67424]: _type = "Task" [ 657.388354] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.392170] env[67424]: DEBUG nova.scheduler.client.report [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 657.405491] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.406602] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.407789] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.413616] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 657.414653] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating directory with path [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.414653] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-602aa1a6-6fe3-485e-99fa-9427e4188ebe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.424811] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.266s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.425460] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 657.434965] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Created directory with path [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.435170] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Fetch image to [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 657.435334] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 657.439998] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c244f4d-88e6-460e-bc8a-4e26c1ca1946 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.447146] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decf786c-e0a0-4dfa-b37d-1b40938c976b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.458208] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a347b1-280a-4b48-9a7b-6ee90f781ec3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.494436] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65d2f14-5499-4b60-868d-ccf7f5aee882 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.499019] env[67424]: DEBUG nova.compute.utils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 657.503029] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 
dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 657.503029] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 657.507391] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-63266037-16b3-476a-8e85-63e0a79b4a5c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.520538] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 657.532188] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 657.618229] env[67424]: DEBUG oslo_vmware.rw_handles [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 657.685223] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 657.692365] env[67424]: DEBUG oslo_vmware.rw_handles [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 657.692657] env[67424]: DEBUG oslo_vmware.rw_handles [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 657.722048] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.722343] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.722564] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.722743] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.722929] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.726316] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.726760] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.727042] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 657.730064] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.730064] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.730064] env[67424]: DEBUG nova.virt.hardware [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.730064] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99165ad-0e64-4b3a-9214-e0e4b4b42eef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.743800] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7bcd82-8079-4463-ab5c-dc427d312179 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.778207] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.778476] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.794644] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 657.880612] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.880917] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.883921] env[67424]: INFO nova.compute.claims [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.999434] env[67424]: DEBUG nova.policy [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48d5393c8dd44b8980673424bf58c3ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb47f8f5ec7645adbd5f78719656f0e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 658.103070] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33643a0b-b9bd-47a4-8cae-238d572c8962 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.110844] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b3bb4a-9ad0-4032-a694-2a7155f8c321 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.160510] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95303c3-b2fe-49f7-a8cc-2dd1fea86f30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.170223] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba928e17-fc78-461f-9414-c451af288d3d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.184832] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.202029] env[67424]: DEBUG 
nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 658.219277] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.219944] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 658.283123] env[67424]: DEBUG nova.compute.utils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.288158] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 658.288158] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 658.300685] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 658.385230] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Start spawning the instance on the hypervisor. 
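The inventory dump above is what the resource tracker compares against placement while holding the "compute_resources" lock. The usable headroom follows from capacity = (total - reserved) * allocation_ratio, with max_unit bounding what any single instance may consume. Using the exact numbers logged for provider b21acede-6243-4c82-934a-a3956380220f:

```python
# Schedulable capacity per resource class, placement-style.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                  'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 126,
                  'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:.0f} schedulable, <= {inv['max_unit']} per instance")
# VCPU: 192 schedulable, <= 16 per instance
# MEMORY_MB: 196078 schedulable, <= 65530 per instance
# DISK_GB: 400 schedulable, <= 126 per instance
```

An m1.nano claim (1 vCPU, 128 MB, 1 GB root disk, per the flavor dumped below) fits trivially, which is why every "Claim successful" in this log succeeds without retries.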
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 658.414894] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.415051] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 658.415664] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.415664] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.415789] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.415893] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.416198] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.416376] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 658.416601] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.417018] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.417424] env[67424]: DEBUG nova.virt.hardware [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.418497] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09efbe5e-de7f-456b-8e36-ae2b1c1301a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.430973] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addb072c-4e1a-413c-a8f7-c3de47439c8e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.553352] env[67424]: DEBUG nova.policy [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16de07e9b3d74dd18c61c4fc9aa6d6bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b119167e1684468860a03ea4a053ffb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 659.318675] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.318964] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.332073] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 659.391506] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.392167] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.393271] env[67424]: INFO nova.compute.claims [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.607973] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f063b2b4-e916-4da3-8139-dc4b85ca0463 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.617134] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba55bfe-bc73-40e1-86b1-753174aa9a2a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.654919] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece4e089-3a68-47a8-96e4-d959728bf433 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.662352] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc06c46-583c-4eec-a24f-59d9cbaf8cef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.681121] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.700956] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 659.729506] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.729804] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 659.773325] env[67424]: DEBUG nova.compute.utils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 659.774810] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 659.774911] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 659.795369] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 659.905070] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Start spawning the instance on the hypervisor. 
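Each "Getting desirable topologies ... Got 1 possible topologies ... Sorted desired topologies" run above is the same computation: with no flavor or image constraints, the limits default to 65536 per dimension and Nova enumerates every sockets x cores x threads factorization of the vCPU count. A simplified sketch of the idea, not the real _get_possible_cpu_topologies (which also applies preferences and sorting):

```python
def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Yield every (sockets, cores, threads) whose product is the vCPU
    # count, within the per-dimension limits (65536 each in this log).
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_cpu_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```

For the one-vCPU m1.nano flavor the only factorization is (1, 1, 1), which is why every walk in this log ends with the same single VirtCPUTopology(cores=1,sockets=1,threads=1).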
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 659.918443] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "8556fc83-206e-4e50-bd54-4185132497a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.918443] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.937814] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 659.946339] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.946339] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.946339] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.946573] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.946614] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.946752] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.947089] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.947170] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.948091] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.948091] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.948091] env[67424]: DEBUG nova.virt.hardware [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.948985] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efe5ecd-f10c-4b2f-9132-bf9f55354cf9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.963632] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64762e5d-4535-41b9-8d5f-5fcb5bb27565 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.031792] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.031792] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.032753] env[67424]: INFO nova.compute.claims [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.114430] env[67424]: DEBUG nova.policy [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b60d85e24bb14f5c8fdd4a4a610dcaee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '849b2bdca65b4c4ca62478339dba3db1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 660.276060] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e010a40-f782-4e62-bca3-88860139ad14 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.289657] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47795557-da21-48bd-982a-0f04c7449824 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.337894] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66260eb4-66ea-44ab-9b52-fe8e9cd8f4ad {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.354485] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dd3d3b-77cf-4ec0-b38c-8ab7069abd24 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.380554] env[67424]: DEBUG nova.compute.provider_tree [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.400081] env[67424]: DEBUG nova.scheduler.client.report [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 660.443630] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.443630] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 660.500158] env[67424]: DEBUG nova.compute.utils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.500158] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 660.500158] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.518797] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 660.575023] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Successfully created port: 7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.614581] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Start spawning the instance on the hypervisor. 
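"Start building networks asynchronously" and the later "Successfully created port" entries bracket a deliberate overlap: allocate_for_instance() runs in the background while the manager continues with block device mappings, and the result is joined before the hypervisor spawn. Sketched here with stdlib futures rather than Nova's eventlet greenthreads; create_port() and build_block_device_mappings() are hypothetical stand-ins:

```python
from concurrent.futures import ThreadPoolExecutor

def create_port(instance_uuid):
    # Stand-in for the Neutron call behind allocate_for_instance().
    return {"port_id": "7a7feac0-d9ef-4db8-8fc6-a6159036c572"}

def build_block_device_mappings():
    # Stand-in for "Start building block device mappings for instance."
    pass

with ThreadPoolExecutor(max_workers=1) as executor:
    nw_future = executor.submit(create_port,
                                "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb")
    build_block_device_mappings()      # proceeds without waiting on Neutron
    network_info = nw_future.result()  # joined before the VM is spawned
```

This overlap is why port-creation confirmations for one instance interleave with claim and topology entries for others throughout this log.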
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 660.652525] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.652724] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.652724] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.652925] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.653761] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.653761] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.653761] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.653906] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 660.654802] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 
tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.654802] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.654802] env[67424]: DEBUG nova.virt.hardware [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.655866] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d555b48-feb8-44c5-a161-a39627dd262e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.666256] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29efcc14-af00-4c34-b0fa-303aec88d894 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.922819] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Successfully created port: 4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.998797] env[67424]: DEBUG nova.policy [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e52276487a764d8ca00b86811e781743', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c83392692ea44a5a9098a96e9aa0332', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 661.229360] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Successfully updated port: 7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.249520] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.250727] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquired lock 
"refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.250727] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.636300] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.659404] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Successfully updated port: 998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.672465] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.672615] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.672760] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.921480] env[67424]: DEBUG nova.compute.manager [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Received event network-vif-plugged-998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 661.921480] env[67424]: DEBUG oslo_concurrency.lockutils [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] Acquiring lock "ef935349-cb7c-4aaa-a735-a010501c5ed4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.921480] env[67424]: DEBUG oslo_concurrency.lockutils [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] Lock "ef935349-cb7c-4aaa-a735-a010501c5ed4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.921480] env[67424]: DEBUG oslo_concurrency.lockutils [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] Lock "ef935349-cb7c-4aaa-a735-a010501c5ed4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.922311] env[67424]: DEBUG nova.compute.manager [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] No waiting events found dispatching network-vif-plugged-998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 661.922659] env[67424]: WARNING nova.compute.manager [req-9dd77e5a-8bb3-43a7-9e5e-4992288bd860 req-26b1ee4e-2558-46ff-8a6f-698bcf2c043e service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Received unexpected event network-vif-plugged-998539a9-9dd1-40fb-9a6c-14761c151b3b for instance with vm_state building and task_state spawning. [ 661.926156] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.262686] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Successfully created port: eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.349074] env[67424]: DEBUG nova.compute.manager [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Received event network-vif-plugged-7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 662.349253] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] Acquiring lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.350538] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.350777] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.351050] env[67424]: DEBUG nova.compute.manager [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] No waiting events found dispatching network-vif-plugged-7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 662.351155] env[67424]: WARNING nova.compute.manager [req-0ec49fb6-d005-4c5f-83cd-cd2d00d0bd5f req-69d8be57-c745-403a-8abe-9b22e399d277 service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Received unexpected event network-vif-plugged-7993e4fb-7427-4089-aae4-76593ed601ce for instance with vm_state building and task_state spawning. [ 662.463104] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.463104] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.479316] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Starting instance... 
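The network-vif-plugged handling above shows the compute manager's external-event rendezvous: a Neutron notification either completes an event some task has registered to wait on, or, as here for instances still in vm_state building / task_state spawning, it finds no waiter and is logged as unexpected. A toy model of that bookkeeping, not Nova's actual InstanceEvents class:

```python
import threading

class InstanceEvents:
    def __init__(self):
        # (instance_uuid, event_name) -> threading.Event a task waits on
        self._waiters = {}

    def prepare(self, instance_uuid, event_name):
        # Called by the task that will wait, before it needs the event.
        ev = threading.Event()
        self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when Neutron's notification arrives via the API.
        ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Mirrors "No waiting events found dispatching ..." followed
            # by the WARNING about an unexpected event.
            print(f"WARNING: unexpected {event_name} for {instance_uuid}")
        else:
            ev.set()

events = InstanceEvents()
events.pop_instance_event(
    "ef935349-cb7c-4aaa-a735-a010501c5ed4",
    "network-vif-plugged-998539a9-9dd1-40fb-9a6c-14761c151b3b")
```

During builds this warning is typically benign: the port was plugged before the driver began waiting for it, so nothing is blocked.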
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 662.561913] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.562171] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.563685] env[67424]: INFO nova.compute.claims [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.825369] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225855b0-564e-4ef4-a225-63e9ef9cc3e7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.837622] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbc1df3-1fc1-4eb2-86ae-2e5010b50b53 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.877553] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f3d097-94d4-44d8-b3ad-7ac9ce6fe319 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.886232] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b808dff-1619-47b6-ab53-23bb5749876d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.901221] env[67424]: DEBUG nova.compute.provider_tree [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.917569] env[67424]: DEBUG nova.scheduler.client.report [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.942389] env[67424]: DEBUG 
oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.942706] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 662.993052] env[67424]: DEBUG nova.compute.utils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.994398] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 662.994564] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 663.007660] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 663.093977] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Start spawning the instance on the hypervisor. 
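"Using /dev/sd instead of None", which recurs for every build in this log, is get_next_device_name() falling back to the default disk prefix when the request supplied no device name. The idea reduced to a sketch; the real helper also accounts for existing mappings and prefixes already in use:

```python
import string

def next_device_name(used, prefix="/dev/sd"):
    # Walk sda, sdb, ... and return the first name not already mapped.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names under " + prefix)

print(next_device_name(set()))         # /dev/sda
print(next_device_name({"/dev/sda"}))  # /dev/sdb
```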
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 663.137288] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=192,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 663.139268] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.139268] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.139268] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.139268] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.139268] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.139496] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 663.139496] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
663.139496] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.139496] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.139496] env[67424]: DEBUG nova.virt.hardware [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.140852] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8128f9d-c909-40ee-a825-88ab80ac61ee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.151046] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e82ddd3-2223-4a41-b2aa-485c49c75fc4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.176015] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Updating instance_info_cache with network_info: [{"id": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "address": "fa:16:3e:5a:6d:07", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998539a9-9d", "ovs_interfaceid": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.186762] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Updating instance_info_cache with network_info: [{"id": "7993e4fb-7427-4089-aae4-76593ed601ce", "address": "fa:16:3e:99:bf:63", "network": {"id": 
"7693bf38-49e5-4d6f-a69d-2d77c5a39084", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-366176508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cdda45146654ae89298886a4c347472", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7993e4fb-74", "ovs_interfaceid": "7993e4fb-7427-4089-aae4-76593ed601ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.200030] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.200030] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Instance network_info: |[{"id": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "address": "fa:16:3e:5a:6d:07", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998539a9-9d", "ovs_interfaceid": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 663.200361] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:6d:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '998539a9-9dd1-40fb-9a6c-14761c151b3b', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.217144] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating folder: Project (849b2bdca65b4c4ca62478339dba3db1). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.218067] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f788d205-bfbf-4a8f-ac77-f5b0da643600 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.222632] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Releasing lock "refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.222908] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance network_info: |[{"id": "7993e4fb-7427-4089-aae4-76593ed601ce", "address": "fa:16:3e:99:bf:63", "network": {"id": "7693bf38-49e5-4d6f-a69d-2d77c5a39084", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-366176508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cdda45146654ae89298886a4c347472", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7993e4fb-74", "ovs_interfaceid": "7993e4fb-7427-4089-aae4-76593ed601ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 663.223715] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:bf:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7993e4fb-7427-4089-aae4-76593ed601ce', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.234194] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f 
tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Creating folder: Project (1cdda45146654ae89298886a4c347472). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.236553] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ef73b5-b5e6-4ab3-917a-902b94f71ff9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.238892] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created folder: Project (849b2bdca65b4c4ca62478339dba3db1) in parent group-v639843. [ 663.239089] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating folder: Instances. Parent ref: group-v639850. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.240283] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52e66ca5-1925-4e4e-8afb-a2872133b44d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.250415] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Created folder: Project (1cdda45146654ae89298886a4c347472) in parent group-v639843. [ 663.250653] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Creating folder: Instances. Parent ref: group-v639851. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.251050] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0eeecec-701e-4a38-9b64-bc88b3a1cecd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.256028] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created folder: Instances in parent group-v639850. [ 663.258145] env[67424]: DEBUG oslo.service.loopingcall [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.258145] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.258145] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e148845-148f-4c16-8f85-e2e818db0d4c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.274343] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Created folder: Instances in parent group-v639851. [ 663.274578] env[67424]: DEBUG oslo.service.loopingcall [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.275471] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.275471] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b6d4ffa-61a8-40ab-8c4e-e177b7169914 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.296459] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.296459] env[67424]: value = "task-3199894" [ 663.296459] env[67424]: _type = "Task" [ 663.296459] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.301869] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.301869] env[67424]: value = "task-3199895" [ 663.301869] env[67424]: _type = "Task" [ 663.301869] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.310354] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199894, 'name': CreateVM_Task} progress is 10%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.315344] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199895, 'name': CreateVM_Task} progress is 5%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.741430] env[67424]: DEBUG nova.policy [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b60d85e24bb14f5c8fdd4a4a610dcaee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '849b2bdca65b4c4ca62478339dba3db1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 663.743296] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Successfully created port: c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.817330] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199895, 'name': CreateVM_Task, 'duration_secs': 0.356065} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.821376] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.821671] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199894, 'name': CreateVM_Task, 'duration_secs': 0.345043} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.827015] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.859845] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.859845] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.859845] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.859845] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96b62109-8b03-4b2f-906a-cf4c9cae72ab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.862675] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.866636] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 663.866636] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b70bde-c4d9-d895-ee99-b112d3f1f29c" [ 663.866636] env[67424]: _type = "Task" [ 663.866636] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.878829] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b70bde-c4d9-d895-ee99-b112d3f1f29c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.381756] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.382234] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.382425] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.382653] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.383876] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 664.383876] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71a27d10-f4f7-4b7b-98cd-25ec8c503aea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.390158] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for the task: (returnval){ [ 664.390158] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52fdfd83-5f4d-51f5-37e2-f2fe317fe34e" [ 664.390158] env[67424]: _type = "Task" [ 664.390158] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.402355] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52fdfd83-5f4d-51f5-37e2-f2fe317fe34e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.781390] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Successfully updated port: 4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 664.807847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.807847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.807847] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 664.903218] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.903483] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.903703] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.938687] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.358314] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Successfully updated port: 7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.373715] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.374155] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquired lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.374349] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.548710] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.851070] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Updating instance_info_cache with network_info: [{"id": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "address": "fa:16:3e:06:dc:7e", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ef8ac24-e9", "ovs_interfaceid": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.864399] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.864724] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance network_info: |[{"id": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "address": "fa:16:3e:06:dc:7e", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ef8ac24-e9", "ovs_interfaceid": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 665.865201] env[67424]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:dc:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.883639] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating folder: Project (bb47f8f5ec7645adbd5f78719656f0e6). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 665.885060] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e2c7155-3ae8-44ce-86f2-955b32cb2ff9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.896170] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created folder: Project (bb47f8f5ec7645adbd5f78719656f0e6) in parent group-v639843. [ 665.896352] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating folder: Instances. Parent ref: group-v639856. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 665.896599] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c1e200c-240d-4b26-9844-05ad9d3b1ae3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.906759] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created folder: Instances in parent group-v639856. [ 665.907055] env[67424]: DEBUG oslo.service.loopingcall [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.907055] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 665.907299] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-875d50e9-81e0-415f-a2bd-b53abf145aaf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.929580] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.929580] env[67424]: value = "task-3199898" [ 665.929580] env[67424]: _type = "Task" [ 665.929580] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.940074] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199898, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.396059] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.397741] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.397741] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 666.397741] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 666.421625] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.422112] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.422112] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.422994] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.423625] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.423708] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.423866] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.423997] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.424132] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 666.424714] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 666.424792] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.427129] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 666.427550] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.442796] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199898, 'name': CreateVM_Task, 'duration_secs': 0.343104} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.443843] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 666.447504] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.447504] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.447504] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.447504] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 666.448310] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.448583] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.449294] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 666.451448] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf5f746-fd58-40a7-8edb-64990c984162 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.455024] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-648a4b34-f68f-494a-8f14-427a0465b342 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.465774] env[67424]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e940e518-9759-42dc-90fc-dc9b18b695c2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.469881] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 666.469881] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52db6aed-161c-d38b-0e07-b983f567b117" [ 666.469881] env[67424]: _type = "Task" [ 666.469881] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.489179] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2891fce6-8537-48ee-b70a-d27fc4e3837a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.496346] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.496346] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.496425] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.499609] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0b72ec-5ea8-4bf3-843e-58c33c1d68a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.537057] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 666.537332] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.537471] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.574137] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Successfully updated port: c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.586676] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.586676] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquired lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.586676] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.652546] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.652703] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ef935349-cb7c-4aaa-a735-a010501c5ed4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.652833] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0786e0b6-8cc3-4577-b1b0-d01a8361666e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.652967] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance eae4bf0e-4a44-4929-92f7-e4f4b6966187 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.653161] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.653285] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.653403] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.653514] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.656199] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 666.656461] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 666.656609] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 666.722098] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 666.756406] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Successfully created port: f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 666.839161] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae6e6f0-8047-43ac-b0e3-891c5341e1e4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.850912] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b72570d-70e3-42d8-b674-f4ac71a657a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.886780] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c572723-a3a5-4284-8a53-c683ab381615 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.899900] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114dd3b1-c9ba-4fb0-ba8c-1dd4b50a8ace {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.915240] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 666.928489] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 666.953839] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 666.954173] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.417s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 667.096136] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Updating instance_info_cache with network_info: [{"id": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "address": "fa:16:3e:ad:9a:21", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7feac0-d9", "ovs_interfaceid": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 667.110827] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Releasing lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 667.111999] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance network_info: |[{"id": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "address": "fa:16:3e:ad:9a:21", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7feac0-d9", "ovs_interfaceid": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 667.112523] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:9a:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a7feac0-d9ef-4db8-8fc6-a6159036c572', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 667.120690] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Creating folder: Project (2b119167e1684468860a03ea4a053ffb). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 667.121281] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-991ead01-af3f-4c4f-9e9d-99d417764c2f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.133253] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Created folder: Project (2b119167e1684468860a03ea4a053ffb) in parent group-v639843.
[ 667.133510] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Creating folder: Instances. Parent ref: group-v639859. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 667.133987] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-239c199c-2d33-47ac-86a6-6ece82ea2c4c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.144886] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Created folder: Instances in parent group-v639859.
[ 667.145184] env[67424]: DEBUG oslo.service.loopingcall [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 667.145377] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 667.145583] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91a9656e-7b5e-489e-a446-3559abe6bc97 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.165692] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 667.165692] env[67424]: value = "task-3199901"
[ 667.165692] env[67424]: _type = "Task"
[ 667.165692] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 667.177513] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199901, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 667.685046] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199901, 'name': CreateVM_Task, 'duration_secs': 0.349062} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 667.685452] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 667.686167] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 667.686344] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 667.686663] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 667.686912] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe99516f-712e-4a0a-9b0d-74144949d32c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.692236] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for the task: (returnval){
[ 667.692236] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528b576c-bf9f-32f3-7e6c-53dfd58cbd1c"
[ 667.692236] env[67424]: _type = "Task"
[ 667.692236] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 667.704061] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528b576c-bf9f-32f3-7e6c-53dfd58cbd1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 667.708041] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Successfully updated port: eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 667.722496] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 667.723124] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 667.723124] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 667.862275] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 667.884849] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Updating instance_info_cache with network_info: [{"id": "c8621464-4d3e-4350-a060-a7bae8f31560", "address": "fa:16:3e:0f:28:d6", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8621464-4d", "ovs_interfaceid": "c8621464-4d3e-4350-a060-a7bae8f31560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 667.902380] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Releasing lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 667.902380] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance network_info: |[{"id": "c8621464-4d3e-4350-a060-a7bae8f31560", "address": "fa:16:3e:0f:28:d6", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8621464-4d", "ovs_interfaceid": "c8621464-4d3e-4350-a060-a7bae8f31560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 667.902512] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:28:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8621464-4d3e-4350-a060-a7bae8f31560', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 667.909813] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Creating folder: Project (6c83392692ea44a5a9098a96e9aa0332). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 667.910718] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e54a359-05ce-4226-926c-d0abd33dd715 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.920889] env[67424]: DEBUG nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Received event network-changed-7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 667.921133] env[67424]: DEBUG nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Refreshing instance network info cache due to event network-changed-7993e4fb-7427-4089-aae4-76593ed601ce. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 667.921240] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Acquiring lock "refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 667.921487] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Acquired lock "refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 667.921556] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Refreshing network info cache for port 7993e4fb-7427-4089-aae4-76593ed601ce {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 667.926319] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Created folder: Project (6c83392692ea44a5a9098a96e9aa0332) in parent group-v639843.
[ 667.926319] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Creating folder: Instances. Parent ref: group-v639862. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 667.926721] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-563dc20f-54ae-400f-ac9c-0518df4c4a95 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.938172] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Created folder: Instances in parent group-v639862.
[ 667.938416] env[67424]: DEBUG oslo.service.loopingcall [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 667.939304] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 667.939582] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22fda30a-015a-4d55-a34f-2e7909212c3f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.962759] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 667.962759] env[67424]: value = "task-3199904"
[ 667.962759] env[67424]: _type = "Task"
[ 667.962759] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 667.971986] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199904, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 668.161743] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Updating instance_info_cache with network_info: [{"id": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "address": "fa:16:3e:f6:8e:93", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2e066d-2e", "ovs_interfaceid": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 668.180678] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 668.183874] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Instance network_info: |[{"id": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "address": "fa:16:3e:f6:8e:93", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2e066d-2e", "ovs_interfaceid": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 668.183996] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:8e:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb2e066d-2e81-4678-827f-1f5ae7a97f68', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 668.193876] env[67424]: DEBUG oslo.service.loopingcall [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 668.195730] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 668.197338] env[67424]: DEBUG nova.compute.manager [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Received event network-vif-plugged-7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 668.197733] env[67424]: DEBUG oslo_concurrency.lockutils [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] Acquiring lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 668.201485] env[67424]: DEBUG oslo_concurrency.lockutils [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 668.201485] env[67424]: DEBUG oslo_concurrency.lockutils [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 668.201485] env[67424]: DEBUG nova.compute.manager [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] No waiting events found dispatching network-vif-plugged-7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 668.203423] env[67424]: WARNING nova.compute.manager [req-db781700-a564-4afd-b6c8-006f7cc834a5 req-0d269d79-86a6-4015-b0f9-bba7f021e466 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Received unexpected event network-vif-plugged-7a7feac0-d9ef-4db8-8fc6-a6159036c572 for instance with vm_state building and task_state spawning.
[ 668.207933] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-747b65a7-9173-4db8-97a6-daa69bbc1568 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.235265] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 668.235620] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 668.235739] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 668.235938] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 668.235938] env[67424]: value = "task-3199905"
[ 668.235938] env[67424]: _type = "Task"
[ 668.235938] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 668.245463] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199905, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 668.403615] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 668.403853] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 668.419164] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 668.475758] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199904, 'name': CreateVM_Task, 'duration_secs': 0.378524} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 668.475758] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 668.476513] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 668.476794] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 668.477238] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 668.478121] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d735d1b0-aedb-4fc3-897e-d0b8b971f50d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.487945] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for the task: (returnval){
[ 668.487945] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c1f88d-e1a9-4dca-451b-e369d1725fd1"
[ 668.487945] env[67424]: _type = "Task"
[ 668.487945] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 668.501274] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c1f88d-e1a9-4dca-451b-e369d1725fd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 668.512822] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 668.513020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 668.516116] env[67424]: INFO nova.compute.claims [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 668.758030] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199905, 'name': CreateVM_Task, 'duration_secs': 0.506116} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 668.758030] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 668.758030] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 668.825084] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3506d70b-e341-44dc-b77f-98bd905cc619 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.839869] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456abbf5-dc74-428e-b01b-e426f14d7a35 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.878197] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56f0cb5-80fa-49d3-a1f5-2164d5bd3f72 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.887595] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed52c33a-104a-4e78-9e37-01ee18a03a83 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.906528] env[67424]: DEBUG nova.compute.provider_tree [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 668.924285] env[67424]: DEBUG nova.scheduler.client.report [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 668.958182] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 668.958182] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 669.004272] env[67424]: DEBUG nova.compute.utils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 669.009633] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 669.009633] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 669.009633] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.009633] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 669.009797] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 669.011428] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 669.012896] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 669.012896] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bc5d8ef-2fed-4f1d-8c1d-1a3b055283c9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.020266] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){
[ 669.020266] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d0e3f5-4103-26e4-8a4b-465a5b477734"
[ 669.020266] env[67424]: _type = "Task"
[ 669.020266] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 669.030692] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d0e3f5-4103-26e4-8a4b-465a5b477734, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 669.030984] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 669.151421] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 669.182938] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=<?>,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:15:51Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 669.183489] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 669.183489] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 669.183612] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 669.183654] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 669.183815] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 669.187982] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 669.188251] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 669.188455] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 669.188626] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 669.188799] env[67424]: DEBUG nova.virt.hardware [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 669.189792] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f97e3bd-32eb-493c-bdbc-ae0a0b5e854c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.198553] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa3eccd-686e-462a-b78c-25cbf293c535 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.241302] env[67424]: DEBUG nova.policy [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '036fd96f154c46809d43fb326963d8d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc8604149f5e441daed48659b37271cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}}
[ 669.342753] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Successfully updated port: f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 669.361847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.363983] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 669.363983] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 669.533778] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 669.534059] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 669.534323] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.676418] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Updated VIF entry in instance network info cache for port 7993e4fb-7427-4089-aae4-76593ed601ce. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 669.676838] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Updating instance_info_cache with network_info: [{"id": "7993e4fb-7427-4089-aae4-76593ed601ce", "address": "fa:16:3e:99:bf:63", "network": {"id": "7693bf38-49e5-4d6f-a69d-2d77c5a39084", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-366176508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1cdda45146654ae89298886a4c347472", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7993e4fb-74", "ovs_interfaceid": "7993e4fb-7427-4089-aae4-76593ed601ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 669.688354] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Releasing lock "refresh_cache-f9097bb5-5320-49e6-9c9a-6397a176a5a3" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 669.688604] env[67424]: DEBUG nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Received event network-changed-998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 669.688772] env[67424]: DEBUG nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Refreshing instance network info cache due to event network-changed-998539a9-9dd1-40fb-9a6c-14761c151b3b. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 669.688982] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Acquiring lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.689584] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Acquired lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 669.689765] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Refreshing network info cache for port 998539a9-9dd1-40fb-9a6c-14761c151b3b {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 669.701092] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 670.364136] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Updating instance_info_cache with network_info: [{"id": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "address": "fa:16:3e:8d:d5:14", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d81b2a-0f", "ovs_interfaceid": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.377396] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.377690] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance network_info: |[{"id": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "address": "fa:16:3e:8d:d5:14", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d81b2a-0f", "ovs_interfaceid": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 670.378110] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:d5:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.387795] env[67424]: DEBUG oslo.service.loopingcall [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.388948] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.389369] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11d45aa3-2edb-43fb-b5c8-3b30e5857f1f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.420933] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.420933] env[67424]: value = "task-3199906" [ 670.420933] env[67424]: _type = "Task" [ 670.420933] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.432325] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199906, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.619037] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Successfully created port: 1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.943117] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199906, 'name': CreateVM_Task, 'duration_secs': 0.424505} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.943117] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 670.943946] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.944354] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.944531] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 670.944826] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be7d8eba-3411-4031-b342-a2533c5d0c22 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.951448] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 670.951448] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528e9c1e-4c7f-1815-f023-308d478b92c9" [ 670.951448] env[67424]: _type = "Task" [ 670.951448] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.964026] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528e9c1e-4c7f-1815-f023-308d478b92c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.988312] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Updated VIF entry in instance network info cache for port 998539a9-9dd1-40fb-9a6c-14761c151b3b. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 670.988740] env[67424]: DEBUG nova.network.neutron [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Updating instance_info_cache with network_info: [{"id": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "address": "fa:16:3e:5a:6d:07", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998539a9-9d", "ovs_interfaceid": "998539a9-9dd1-40fb-9a6c-14761c151b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.002527] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Releasing lock "refresh_cache-ef935349-cb7c-4aaa-a735-a010501c5ed4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.002527] env[67424]: DEBUG nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Received event network-vif-plugged-4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 671.002527] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Acquiring lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.002815] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.002815] env[67424]: DEBUG oslo_concurrency.lockutils [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.002999] env[67424]: DEBUG
nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] No waiting events found dispatching network-vif-plugged-4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 671.004030] env[67424]: WARNING nova.compute.manager [req-7b72cbe4-72b4-4aa3-a937-5506f9d7c581 req-3d91a5ef-d56b-47bd-82eb-95e72306513b service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Received unexpected event network-vif-plugged-4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d for instance with vm_state building and task_state spawning. [ 671.038725] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "0ac8dd6c-41e3-4749-9129-02688c3385cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.038991] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "0ac8dd6c-41e3-4749-9129-02688c3385cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.464767] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.465113] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.465274] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.395009] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Received event network-changed-4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 672.396877] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Refreshing instance network info cache due to event
network-changed-4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 672.396877] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Acquiring lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.396877] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Acquired lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.396877] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Refreshing network info cache for port 4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 673.575533] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Updated VIF entry in instance network info cache for port 4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 673.575879] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Updating instance_info_cache with network_info: [{"id": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "address": "fa:16:3e:06:dc:7e", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ef8ac24-e9", "ovs_interfaceid": "4ef8ac24-e9bf-4a80-b9d2-0f5e4262ad3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.589063] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Releasing lock "refresh_cache-dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.589345] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] 
Received event network-vif-plugged-c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 673.589770] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Acquiring lock "8556fc83-206e-4e50-bd54-4185132497a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.589836] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Lock "8556fc83-206e-4e50-bd54-4185132497a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.590118] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Lock "8556fc83-206e-4e50-bd54-4185132497a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.590118] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] No waiting events found dispatching network-vif-plugged-c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 673.590299] env[67424]: WARNING nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Received unexpected event network-vif-plugged-c8621464-4d3e-4350-a060-a7bae8f31560 for instance with vm_state building and task_state spawning. [ 673.590461] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Received event network-changed-c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 673.590626] env[67424]: DEBUG nova.compute.manager [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Refreshing instance network info cache due to event network-changed-c8621464-4d3e-4350-a060-a7bae8f31560.
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 673.590797] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Acquiring lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.590932] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Acquired lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.591213] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Refreshing network info cache for port c8621464-4d3e-4350-a060-a7bae8f31560 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 673.611251] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Successfully updated port: 1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 673.625893] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.625893] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquired lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.625893] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.745035] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.754601] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Received event network-changed-7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 673.754601] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Refreshing instance network info cache due to event network-changed-7a7feac0-d9ef-4db8-8fc6-a6159036c572. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 673.754601] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquiring lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.754775] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquired lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.754951] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Refreshing network info cache for port 7a7feac0-d9ef-4db8-8fc6-a6159036c572 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 674.412314] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Updated VIF entry in instance network info cache for port c8621464-4d3e-4350-a060-a7bae8f31560. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 674.412314] env[67424]: DEBUG nova.network.neutron [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Updating instance_info_cache with network_info: [{"id": "c8621464-4d3e-4350-a060-a7bae8f31560", "address": "fa:16:3e:0f:28:d6", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8621464-4d", "ovs_interfaceid": "c8621464-4d3e-4350-a060-a7bae8f31560", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.430932] env[67424]: DEBUG oslo_concurrency.lockutils [req-fcff23ad-3d58-410e-9133-4c500e5a76c8 req-23a5592b-33f2-4a64-a75e-83c9033a3eea service nova] Releasing lock "refresh_cache-8556fc83-206e-4e50-bd54-4185132497a7" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.587327] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.588615] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.606457] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Updating instance_info_cache with network_info: [{"id": "1e6065c7-7996-449c-ae7b-5b459af2501e", "address": "fa:16:3e:cd:97:91", "network": {"id": "91dde4c1-0193-4de0-be6a-45e8a9f749a4", "bridge": "br-int", "label": "tempest-ServersTestJSON-2142730747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type":
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc8604149f5e441daed48659b37271cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e6065c7-79", "ovs_interfaceid": "1e6065c7-7996-449c-ae7b-5b459af2501e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.627866] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Releasing lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.628190] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance network_info: |[{"id": "1e6065c7-7996-449c-ae7b-5b459af2501e", "address": "fa:16:3e:cd:97:91", "network": {"id": "91dde4c1-0193-4de0-be6a-45e8a9f749a4", "bridge": "br-int", "label": "tempest-ServersTestJSON-2142730747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc8604149f5e441daed48659b37271cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e6065c7-79", "ovs_interfaceid": "1e6065c7-7996-449c-ae7b-5b459af2501e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 674.628599] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:97:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4adc8ed0-d11a-4510-9be0-b27c0da3a903', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e6065c7-7996-449c-ae7b-5b459af2501e', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.637514] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 
tempest-ServersTestJSON-393926358-project-member] Creating folder: Project (bc8604149f5e441daed48659b37271cb). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 674.640385] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b543667-e3a6-4075-827b-f4e9391bd1cf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.653755] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Created folder: Project (bc8604149f5e441daed48659b37271cb) in parent group-v639843. [ 674.653949] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Creating folder: Instances. Parent ref: group-v639867. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 674.654562] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3441a2e-a818-410c-b9b7-0a81f002223d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.669112] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Created folder: Instances in parent group-v639867. [ 674.669456] env[67424]: DEBUG oslo.service.loopingcall [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.671052] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 674.671822] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-275a8843-c35c-4da7-8fe8-a7a8ea5792ad {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.696982] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.696982] env[67424]: value = "task-3199909" [ 674.696982] env[67424]: _type = "Task" [ 674.696982] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.705433] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199909, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.810128] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Updated VIF entry in instance network info cache for port 7a7feac0-d9ef-4db8-8fc6-a6159036c572. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 674.810510] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Updating instance_info_cache with network_info: [{"id": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "address": "fa:16:3e:ad:9a:21", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7feac0-d9", "ovs_interfaceid": "7a7feac0-d9ef-4db8-8fc6-a6159036c572", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.830331] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Releasing lock "refresh_cache-5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.830636] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Received event network-vif-plugged-eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 674.830826] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquiring lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.831035] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.831194] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.831353] env[67424]: DEBUG nova.compute.manager
[req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] No waiting events found dispatching network-vif-plugged-eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 674.831513] env[67424]: WARNING nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Received unexpected event network-vif-plugged-eb2e066d-2e81-4678-827f-1f5ae7a97f68 for instance with vm_state building and task_state spawning. [ 674.831685] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Received event network-changed-eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 674.831841] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Refreshing instance network info cache due to event network-changed-eb2e066d-2e81-4678-827f-1f5ae7a97f68. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 674.832034] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquiring lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.832175] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquired lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.837573] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Refreshing network info cache for port eb2e066d-2e81-4678-827f-1f5ae7a97f68 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 675.211064] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199909, 'name': CreateVM_Task, 'duration_secs': 0.321392} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.211243] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 675.212507] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.212507] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.212507] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 675.213536] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-047a0c69-a674-48e2-90ee-b941d918fb20 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.221489] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for the task: (returnval){ [ 675.221489] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]526f7bed-311d-1a0c-597f-eba744b7167f" [ 675.221489] env[67424]: _type = "Task" [ 675.221489] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.231796] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]526f7bed-311d-1a0c-597f-eba744b7167f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.720096] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Updated VIF entry in instance network info cache for port eb2e066d-2e81-4678-827f-1f5ae7a97f68. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 675.720902] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Updating instance_info_cache with network_info: [{"id": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "address": "fa:16:3e:f6:8e:93", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb2e066d-2e", "ovs_interfaceid": "eb2e066d-2e81-4678-827f-1f5ae7a97f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.736718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.736978] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.737255] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.737614] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Releasing lock "refresh_cache-cf9e3c04-b1be-41a3-b408-de1f48fa96c6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.737831] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Received event network-vif-plugged-f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 675.738201] env[67424]: DEBUG 
oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquiring lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.738201] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.738380] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.740987] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] No waiting events found dispatching network-vif-plugged-f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 675.741274] env[67424]: WARNING nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Received unexpected event network-vif-plugged-f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 for instance with vm_state building and task_state spawning. [ 675.741686] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Received event network-changed-f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 675.741686] env[67424]: DEBUG nova.compute.manager [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Refreshing instance network info cache due to event network-changed-f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633.
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 675.742132] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquiring lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.742295] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Acquired lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.742512] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Refreshing network info cache for port f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 676.506223] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Updated VIF entry in instance network info cache for port f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 676.506667] env[67424]: DEBUG nova.network.neutron [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Updating instance_info_cache with network_info: [{"id": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "address": "fa:16:3e:8d:d5:14", "network": {"id": "b7cf03ff-3528-4dfd-864b-33469d156265", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1964744790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "849b2bdca65b4c4ca62478339dba3db1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3d81b2a-0f", "ovs_interfaceid": "f3d81b2a-0f3f-4e9a-a5cc-1c6b73693633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.518883] env[67424]: DEBUG oslo_concurrency.lockutils [req-b4859dd6-2802-4bcf-85e6-096c273b8b63 req-c8da3ffb-bbfa-493f-9aa6-5357826ae0b6 service nova] Releasing lock "refresh_cache-bb35e63a-3fb6-4a2a-8037-3fcc16def092" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.038156] env[67424]: DEBUG nova.compute.manager [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Received event 
network-vif-plugged-1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 678.038156] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Acquiring lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.038156] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.038156] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.038761] env[67424]: DEBUG nova.compute.manager [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] No waiting events found dispatching network-vif-plugged-1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 678.039196] env[67424]: WARNING nova.compute.manager [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Received unexpected event network-vif-plugged-1e6065c7-7996-449c-ae7b-5b459af2501e for instance with vm_state building and task_state spawning. [ 678.039474] env[67424]: DEBUG nova.compute.manager [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Received event network-changed-1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 678.039656] env[67424]: DEBUG nova.compute.manager [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Refreshing instance network info cache due to event network-changed-1e6065c7-7996-449c-ae7b-5b459af2501e.
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 678.039871] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Acquiring lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.040026] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Acquired lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.040185] env[67424]: DEBUG nova.network.neutron [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Refreshing network info cache for port 1e6065c7-7996-449c-ae7b-5b459af2501e {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 678.779402] env[67424]: DEBUG nova.network.neutron [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Updated VIF entry in instance network info cache for port 1e6065c7-7996-449c-ae7b-5b459af2501e. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 678.779402] env[67424]: DEBUG nova.network.neutron [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Updating instance_info_cache with network_info: [{"id": "1e6065c7-7996-449c-ae7b-5b459af2501e", "address": "fa:16:3e:cd:97:91", "network": {"id": "91dde4c1-0193-4de0-be6a-45e8a9f749a4", "bridge": "br-int", "label": "tempest-ServersTestJSON-2142730747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc8604149f5e441daed48659b37271cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e6065c7-79", "ovs_interfaceid": "1e6065c7-7996-449c-ae7b-5b459af2501e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.795149] env[67424]: DEBUG oslo_concurrency.lockutils [req-13d0650b-c506-440c-a8fe-11637cf30be6 req-947f1809-70d5-4194-af92-cae937649c5a service nova] Releasing lock "refresh_cache-91fdd93f-a6ef-44ad-b842-6d9b3173e626" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.885959] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock 
"924956a0-9a91-4870-a240-6a1d7868904b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.886511] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.086482] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.086765] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.051758] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.052382] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.484144] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bb9143fd-06e6-4e11-b0ef-234bf8a362e2 tempest-ImagesOneServerTestJSON-558811522 tempest-ImagesOneServerTestJSON-558811522-project-member] Acquiring lock "7d7fcc13-5e0b-421f-80a9-f7f37afa51b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.484375] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bb9143fd-06e6-4e11-b0ef-234bf8a362e2 tempest-ImagesOneServerTestJSON-558811522 tempest-ImagesOneServerTestJSON-558811522-project-member] Lock "7d7fcc13-5e0b-421f-80a9-f7f37afa51b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.289043] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ca89d28-6600-4fef-839c-a4f296ce107d tempest-ServersWithSpecificFlavorTestJSON-306453774 tempest-ServersWithSpecificFlavorTestJSON-306453774-project-member] Acquiring lock "aeddb8eb-4ca6-4e91-a140-05badd9e685e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.290276] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ca89d28-6600-4fef-839c-a4f296ce107d tempest-ServersWithSpecificFlavorTestJSON-306453774 tempest-ServersWithSpecificFlavorTestJSON-306453774-project-member] Lock "aeddb8eb-4ca6-4e91-a140-05badd9e685e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.686103] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0ec8da25-2b62-4ef7-acbd-a7ac0a0746ae tempest-AttachInterfacesV270Test-325504290 tempest-AttachInterfacesV270Test-325504290-project-member] Acquiring lock "acf30dc7-38e0-486a-a54a-c6ce56ce1c57" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.686416] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0ec8da25-2b62-4ef7-acbd-a7ac0a0746ae tempest-AttachInterfacesV270Test-325504290 tempest-AttachInterfacesV270Test-325504290-project-member] Lock "acf30dc7-38e0-486a-a54a-c6ce56ce1c57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.434363] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ed11387f-d442-4b14-96ac-30c45cd5a615 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "7e7454c0-a7b8-418e-90ab-f2ce85125b64" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.434626] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ed11387f-d442-4b14-96ac-30c45cd5a615 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "7e7454c0-a7b8-418e-90ab-f2ce85125b64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.590743] env[67424]: WARNING oslo_vmware.rw_handles [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [
703.590743] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 703.590743] env[67424]: ERROR oslo_vmware.rw_handles [ 703.590743] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 703.591511] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 703.591511] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Copying Virtual Disk [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/c02a2810-969d-41b3-a3e5-81ee559e96a3/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 703.591888] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a28758b7-0066-4d34-a74b-1388cf019052 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.604315] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for the task: (returnval){ [ 703.604315] env[67424]: value = "task-3199910" [ 703.604315] env[67424]: _type = "Task" [ 703.604315] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.614939] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Task: {'id': task-3199910, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.121416] env[67424]: DEBUG oslo_vmware.exceptions [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 704.121416] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.123503] env[67424]: ERROR nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 704.123503] env[67424]: Faults: ['InvalidArgument'] [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Traceback (most recent call last): [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] yield resources [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self.driver.spawn(context, instance, image_meta, [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self._fetch_image_if_missing(context, vi) [ 704.123503] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] image_cache(vi, tmp_image_ds_loc) [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] vm_util.copy_virtual_disk( [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] session._wait_for_task(vmdk_copy_task) [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return self.wait_for_task(task_ref) [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return evt.wait() [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] result = hub.switch() [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.123943] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return self.greenlet.switch() [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self.f(*self.args, **self.kw) [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] raise exceptions.translate_fault(task_info.error) [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Faults: ['InvalidArgument'] [ 704.124478] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] [ 704.124478] env[67424]: INFO nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Terminating instance [ 704.128020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.128020] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 
tempest-ServersAdmin275Test-1806667437-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 704.128020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.128020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquired lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.128338] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.128338] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-624da297-7a8f-42bb-9a2e-c6579a23aef1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.136682] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 704.138022] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 704.138022] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c1a464f-bb5c-4c41-bb4c-33216a9169da {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.155441] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for the task: (returnval){ [ 704.155441] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cf1522-4de2-a03a-edd0-eb8353077575" [ 704.155441] env[67424]: _type = "Task" [ 704.155441] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.163635] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cf1522-4de2-a03a-edd0-eb8353077575, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.191816] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.451713] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.467030] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Releasing lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.467219] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 704.467368] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 704.468555] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ccd45a-83aa-4c38-9563-3978766bad58 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.480133] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 704.480455] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92c5ca46-e8c5-4497-b5f2-3338319a4a07 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.521863] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 704.522305] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Deleting contents of the VM from datastore 
datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 704.522391] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Deleting the datastore file [datastore2] 0786e0b6-8cc3-4577-b1b0-d01a8361666e {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.522589] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da5f303c-68e0-4635-87c3-ba9d0e9f052c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.531099] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for the task: (returnval){ [ 704.531099] env[67424]: value = "task-3199916" [ 704.531099] env[67424]: _type = "Task" [ 704.531099] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.545460] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Task: {'id': task-3199916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.666387] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 704.666667] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Creating directory with path [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 704.666905] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c985d5b-d6b1-4b87-97b6-3d5f24a80bba {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.689553] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Created directory with path [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 704.689819] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Fetch image to [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 704.690010] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 704.691918] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a4acdb-1be2-4158-9a81-96eaa979afe7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.699146] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3426d1f1-1b16-44c7-afa0-a869b14c0e05 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.710709] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a905b894-b6eb-482a-a9c3-d35bd253ac47 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.750664] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc1586f-6368-4aec-9cb9-bdc9155e97ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.758292] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b2e33411-5e7e-416e-822a-8ae5b3fb2ebd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.780803] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 704.856314] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 704.928618] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Completed reading data from the image iterator. 
{{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 704.928909] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 705.044856] env[67424]: DEBUG oslo_vmware.api [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Task: {'id': task-3199916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071966} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.045207] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 705.045897] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 705.046501] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 705.047058] env[67424]: INFO nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Took 0.58 seconds to destroy the instance on the hypervisor. [ 705.047417] env[67424]: DEBUG oslo.service.loopingcall [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.047728] env[67424]: DEBUG nova.compute.manager [-] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Skipping network deallocation for instance since networking was not requested.
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 705.050937] env[67424]: DEBUG nova.compute.claims [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 705.051426] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.051426] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.492628] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8bb96f-6f2f-47a0-b0ec-090ca822e12b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.506413] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52707498-70eb-466e-90b2-4d913c7e4db1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.560322] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7447fecb-d659-4a42-969a-2bc150c8475c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.570384] env[67424]: DEBUG oslo_concurrency.lockutils [None req-86b5b295-3f42-402b-ac62-7cb7c3758097 tempest-FloatingIPsAssociationTestJSON-1992332586 tempest-FloatingIPsAssociationTestJSON-1992332586-project-member] Acquiring lock "beb3c53f-a0ee-435e-9f95-c6bf0d68b872" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.570609] env[67424]: DEBUG oslo_concurrency.lockutils [None req-86b5b295-3f42-402b-ac62-7cb7c3758097 tempest-FloatingIPsAssociationTestJSON-1992332586 tempest-FloatingIPsAssociationTestJSON-1992332586-project-member] Lock "beb3c53f-a0ee-435e-9f95-c6bf0d68b872" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.576382] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67038cb1-f354-4e66-87be-7e81e12400ba {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.591609] env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member]
Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.636051] env[67424]: ERROR nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [req-a963b25b-7247-4241-bfa8-452560d1b6af] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b21acede-6243-4c82-934a-a3956380220f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a963b25b-7247-4241-bfa8-452560d1b6af"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 705.659230] env[67424]: DEBUG nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 705.686175] env[67424]: DEBUG nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 705.686175] env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.703706] env[67424]: DEBUG 
nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 705.726552] env[67424]: DEBUG nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 706.121967] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b433d8-ac04-4832-962f-9da1617481f8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.129879] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1283c3dd-9aeb-4426-8431-e79f6d95d723 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.163958] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faa4256-7ca2-4582-8467-26c555cb54a6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.172495] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe48f02-5281-4f5f-9c2c-97b31a7ca400 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.188687] env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.235502] env[67424]: DEBUG nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updated inventory for provider b21acede-6243-4c82-934a-a3956380220f with generation 24 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 706.235770] 
env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 24 to 25 during operation: update_inventory {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 706.235955] env[67424]: DEBUG nova.compute.provider_tree [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.255732] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.204s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.256196] env[67424]: ERROR nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 706.256196] env[67424]: Faults: ['InvalidArgument'] [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Traceback (most recent call last): [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self.driver.spawn(context, instance, image_meta, [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self._fetch_image_if_missing(context, vi) [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] image_cache(vi, tmp_image_ds_loc) [ 706.256196] env[67424]: ERROR nova.compute.manager [instance: 
0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] vm_util.copy_virtual_disk( [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] session._wait_for_task(vmdk_copy_task) [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return self.wait_for_task(task_ref) [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return evt.wait() [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] result = hub.switch() [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] return self.greenlet.switch() [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 706.256997] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] self.f(*self.args, **self.kw) [ 706.257474] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 706.257474] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] raise exceptions.translate_fault(task_info.error) [ 706.257474] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 706.257474] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Faults: ['InvalidArgument'] [ 706.257474] env[67424]: ERROR nova.compute.manager [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] [ 706.257474] env[67424]: DEBUG nova.compute.utils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 706.260332] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 
tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Build of instance 0786e0b6-8cc3-4577-b1b0-d01a8361666e was re-scheduled: A specified parameter was not correct: fileType [ 706.260332] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 706.260757] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 706.261162] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquiring lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.261768] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Acquired lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.261768] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.318247] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.572100] env[67424]: DEBUG nova.network.neutron [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.584229] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Releasing lock "refresh_cache-0786e0b6-8cc3-4577-b1b0-d01a8361666e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.584229] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 706.584229] env[67424]: DEBUG nova.compute.manager [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] [instance: 0786e0b6-8cc3-4577-b1b0-d01a8361666e] Skipping network deallocation for instance since networking was not requested. {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 706.716764] env[67424]: INFO nova.scheduler.client.report [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Deleted allocations for instance 0786e0b6-8cc3-4577-b1b0-d01a8361666e [ 706.756838] env[67424]: DEBUG oslo_concurrency.lockutils [None req-43c82b6f-c453-4ec6-80e9-0c443aa7da80 tempest-ServerDiagnosticsV248Test-434141610 tempest-ServerDiagnosticsV248Test-434141610-project-member] Lock "0786e0b6-8cc3-4577-b1b0-d01a8361666e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 51.753s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.803168] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 706.883751] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.884053] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.885621] env[67424]: INFO nova.compute.claims [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.279262] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0f3b6e-fa0a-454f-8f62-52872edade1d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.288933] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129f427b-6565-4a1e-abbb-d641b5340fdb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.325307] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3963b7-8d0b-40b0-83d2-7bd6c044648d
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.334604] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0391d6f7-4fa0-4d36-9011-ba9f2a85e340 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.353050] env[67424]: DEBUG nova.compute.provider_tree [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.365246] env[67424]: DEBUG nova.scheduler.client.report [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 707.387580] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.388436] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 707.452279] env[67424]: DEBUG nova.compute.utils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.453716] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Allocating IP information in the background. 
{{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 707.453763] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 707.469663] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 707.554827] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 707.588369] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.588534] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.588693] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.589057] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.590208] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Image pref 0:0:0 {{(pid=67424) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.590474] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.590713] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 707.590908] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 707.591102] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.591276] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.591454] env[67424]: DEBUG nova.virt.hardware [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.593263] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910f27b0-6b3b-404d-98eb-94d28ea6c6ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.601837] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14959a6f-07e4-410e-beb6-fe8a519b0793 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.696476] env[67424]: DEBUG nova.policy [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c65b6892b41142b58dc18f5b7b859e10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6c5bcd7ce3f4c50a8abd57721989c48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 709.018767] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Successfully created port: 88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.609156] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "a7d131b6-3584-48c3-acce-d553c145a837" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.609156] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.106462] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Successfully updated port: 88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 711.121887] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.121887] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquired lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.121887] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.148680] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4f1324e8-6c3e-45e8-9063-9fdae3dfaa4d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "39331dec-cd53-4cab-b48a-e4dd83f55b9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.148858] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-4f1324e8-6c3e-45e8-9063-9fdae3dfaa4d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "39331dec-cd53-4cab-b48a-e4dd83f55b9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.404340] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.623965] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Updating instance_info_cache with network_info: [{"id": "88130962-2a74-40ae-97e0-61b6a0efc875", "address": "fa:16:3e:cd:cc:f7", "network": {"id": "e0da067e-a47e-40ce-9329-b5b489f06fd5", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1443865134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c5bcd7ce3f4c50a8abd57721989c48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88130962-2a", "ovs_interfaceid": "88130962-2a74-40ae-97e0-61b6a0efc875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.645361] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Releasing lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.645361] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Instance network_info: |[{"id": "88130962-2a74-40ae-97e0-61b6a0efc875", "address": "fa:16:3e:cd:cc:f7", "network": {"id": "e0da067e-a47e-40ce-9329-b5b489f06fd5", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1443865134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c5bcd7ce3f4c50a8abd57721989c48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88130962-2a", "ovs_interfaceid": "88130962-2a74-40ae-97e0-61b6a0efc875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 711.645518] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:cc:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88130962-2a74-40ae-97e0-61b6a0efc875', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.652205] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Creating folder: Project (f6c5bcd7ce3f4c50a8abd57721989c48). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 711.652913] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-534a0008-e658-441e-a458-9f02f3d5417f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.666104] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Created folder: Project (f6c5bcd7ce3f4c50a8abd57721989c48) in parent group-v639843. [ 711.666104] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Creating folder: Instances. Parent ref: group-v639873. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 711.666104] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93a1b02f-b270-4601-a546-c6d8b90e6a88 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.674088] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Created folder: Instances in parent group-v639873. 
[ 711.674470] env[67424]: DEBUG oslo.service.loopingcall [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.674756] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 711.675077] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6beb0669-4702-4b17-9ebf-160de0726402 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.697307] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.697307] env[67424]: value = "task-3199922" [ 711.697307] env[67424]: _type = "Task" [ 711.697307] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.705391] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199922, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.209408] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199922, 'name': CreateVM_Task, 'duration_secs': 0.317754} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.209598] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 712.210821] env[67424]: DEBUG oslo_vmware.service [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755e6ebd-286a-49ac-bf0f-08ae033c2071 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.218343] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.218514] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.218796] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 712.219075] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd1ad56a-fb74-4dd4-8ced-2e410a3c8be0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.224075] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Waiting for the task: (returnval){ [ 712.224075] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52020e64-2cb4-e94a-9224-06b892edeed2" [ 712.224075] env[67424]: _type = "Task" [ 712.224075] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.233252] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52020e64-2cb4-e94a-9224-06b892edeed2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.421954] env[67424]: DEBUG nova.compute.manager [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Received event network-vif-plugged-88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 712.422309] env[67424]: DEBUG oslo_concurrency.lockutils [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] Acquiring lock "0ac8dd6c-41e3-4749-9129-02688c3385cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.422745] env[67424]: DEBUG oslo_concurrency.lockutils [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] Lock "0ac8dd6c-41e3-4749-9129-02688c3385cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.422745] env[67424]: DEBUG oslo_concurrency.lockutils [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] Lock "0ac8dd6c-41e3-4749-9129-02688c3385cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.422863] env[67424]: DEBUG nova.compute.manager [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] No waiting events found dispatching network-vif-plugged-88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 712.423017] env[67424]: WARNING nova.compute.manager [req-df677da7-6ea2-4939-b0e4-dbc15a7e1056 req-6a17dc15-5440-4194-b1ae-0ab2fc54d392 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Received unexpected event 
network-vif-plugged-88130962-2a74-40ae-97e0-61b6a0efc875 for instance with vm_state building and task_state spawning. [ 712.734692] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.734957] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.735193] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.735343] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.735515] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.735761] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7d78d46-26b5-451b-84b1-42bafd904c3b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.744065] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.744262] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 712.745025] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a6867a-d59d-4b23-b01f-78333d14c010 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.750834] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5c684e-d6b5-46c4-a9db-46986857c1fd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.755499] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Waiting for the task: (returnval){ [ 712.755499] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52bcc900-804b-8435-b3a0-3a2bf0ad36cf" [ 712.755499] env[67424]: _type = "Task" [ 712.755499] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.762630] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52bcc900-804b-8435-b3a0-3a2bf0ad36cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.270748] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 713.271153] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Creating directory with path [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.271353] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4e3f520-842a-4542-bd33-76b79415e104 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.280328] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e1aab40-c4a5-4b90-a164-314fe6d3dbb8 tempest-ImagesOneServerNegativeTestJSON-96875994 tempest-ImagesOneServerNegativeTestJSON-96875994-project-member] Acquiring lock "69d8f97f-f58d-4185-95fd-05ed6a6b52d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.280328] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e1aab40-c4a5-4b90-a164-314fe6d3dbb8 tempest-ImagesOneServerNegativeTestJSON-96875994 tempest-ImagesOneServerNegativeTestJSON-96875994-project-member] Lock "69d8f97f-f58d-4185-95fd-05ed6a6b52d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.292540] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Created directory with path [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.292759] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Fetch image to [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 713.292931] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore1 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 713.294093] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12e6162-d353-4e1f-9dcf-54db507e02e5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.303146] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772fee46-c4c7-42a1-aaed-93519af891ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.313370] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e6f6b6-3fe8-4dee-ad77-0e98f88fba16 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.344384] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1853ae9b-1d37-4396-90ad-0eae959eef78 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.352020] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4c6613f4-2868-4ef0-9592-84903d70b581 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.382038] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore1 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 713.451450] env[67424]: DEBUG oslo_vmware.rw_handles [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 
tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 713.518212] env[67424]: DEBUG oslo_vmware.rw_handles [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 713.518477] env[67424]: DEBUG oslo_vmware.rw_handles [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 714.993303] env[67424]: DEBUG nova.compute.manager [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Received event network-changed-88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 714.993303] env[67424]: DEBUG nova.compute.manager [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Refreshing instance network info cache due to event network-changed-88130962-2a74-40ae-97e0-61b6a0efc875. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 714.993303] env[67424]: DEBUG oslo_concurrency.lockutils [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] Acquiring lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.993303] env[67424]: DEBUG oslo_concurrency.lockutils [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] Acquired lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.993303] env[67424]: DEBUG nova.network.neutron [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Refreshing network info cache for port 88130962-2a74-40ae-97e0-61b6a0efc875 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.926423] env[67424]: DEBUG nova.network.neutron [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Updated VIF entry in instance network info cache for port 88130962-2a74-40ae-97e0-61b6a0efc875. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 715.926779] env[67424]: DEBUG nova.network.neutron [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Updating instance_info_cache with network_info: [{"id": "88130962-2a74-40ae-97e0-61b6a0efc875", "address": "fa:16:3e:cd:cc:f7", "network": {"id": "e0da067e-a47e-40ce-9329-b5b489f06fd5", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1443865134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c5bcd7ce3f4c50a8abd57721989c48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88130962-2a", "ovs_interfaceid": "88130962-2a74-40ae-97e0-61b6a0efc875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.941820] env[67424]: DEBUG oslo_concurrency.lockutils [req-cf44bd13-9823-4b96-a09b-4daf242fb379 req-e9160ac0-22c4-4a85-b8b6-058451e304d3 service nova] Releasing lock "refresh_cache-0ac8dd6c-41e3-4749-9129-02688c3385cc" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.392457] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2f4aafb5-e940-48fc-bde7-d99e2843bf4b tempest-ServerAddressesTestJSON-2079908818 tempest-ServerAddressesTestJSON-2079908818-project-member] Acquiring lock "33e5f591-2ed7-4302-b7be-8b800cebd5f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.392457] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2f4aafb5-e940-48fc-bde7-d99e2843bf4b tempest-ServerAddressesTestJSON-2079908818 tempest-ServerAddressesTestJSON-2079908818-project-member] Lock "33e5f591-2ed7-4302-b7be-8b800cebd5f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.931162] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2ef0f44b-194c-488d-bd0a-1e5fc7239e9f tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] Acquiring lock "7912b146-0eae-4cf1-a19c-8b2ae94b22f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.931162] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2ef0f44b-194c-488d-bd0a-1e5fc7239e9f tempest-ListImageFiltersTestJSON-2131471460 
tempest-ListImageFiltersTestJSON-2131471460-project-member] Lock "7912b146-0eae-4cf1-a19c-8b2ae94b22f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.626686] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cc9b72f8-ca20-4d35-bbd9-8b5e8ca37198 tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] Acquiring lock "7eb21186-f497-4031-ad88-6b61608b1c3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.627011] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cc9b72f8-ca20-4d35-bbd9-8b5e8ca37198 tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] Lock "7eb21186-f497-4031-ad88-6b61608b1c3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.675600] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bdf01269-36ed-4ce2-94c3-d5efbc525027 tempest-TenantUsagesTestJSON-26869185 tempest-TenantUsagesTestJSON-26869185-project-member] Acquiring lock "122dfe58-58f3-4d91-b0dd-f3dfd26bfaed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.676429] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bdf01269-36ed-4ce2-94c3-d5efbc525027 tempest-TenantUsagesTestJSON-26869185 tempest-TenantUsagesTestJSON-26869185-project-member] Lock "122dfe58-58f3-4d91-b0dd-f3dfd26bfaed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.860297] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f7e12f9-98fa-4ee8-b32a-1956c9c0dbe0 tempest-ServersTestManualDisk-2034689218 tempest-ServersTestManualDisk-2034689218-project-member] Acquiring lock "cf18eb94-40cd-4451-9cf9-a59679dc2231" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.860532] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f7e12f9-98fa-4ee8-b32a-1956c9c0dbe0 tempest-ServersTestManualDisk-2034689218 tempest-ServersTestManualDisk-2034689218-project-member] Lock "cf18eb94-40cd-4451-9cf9-a59679dc2231" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.942732] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.971924] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.972132] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.976145] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.976647] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.976819] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.976965] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 726.977133] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 726.996330] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.996330] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.996330] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.996330] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 726.996602] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7481befe-878c-45a7-8a8e-4777f57f0bc8 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.008102] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e72824-2da7-4771-9a2b-3b069e1eadb8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.024073] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fbd980-463e-4033-bd18-e454472198c0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.032763] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d5159e-4fa9-4948-897c-006f05adc245 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.070271] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181005MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 727.070485] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.070737] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.171035] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171035] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ef935349-cb7c-4aaa-a735-a010501c5ed4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171035] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance eae4bf0e-4a44-4929-92f7-e4f4b6966187 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171035] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171304] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171304] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171304] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171304] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171459] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.171459] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0ac8dd6c-41e3-4749-9129-02688c3385cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 727.200861] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.245535] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.274780] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.287536] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.306901] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7d7fcc13-5e0b-421f-80a9-f7f37afa51b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.324511] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aeddb8eb-4ca6-4e91-a140-05badd9e685e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.338556] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance acf30dc7-38e0-486a-a54a-c6ce56ce1c57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.350815] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e7454c0-a7b8-418e-90ab-f2ce85125b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.368879] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance beb3c53f-a0ee-435e-9f95-c6bf0d68b872 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.387794] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.399619] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 39331dec-cd53-4cab-b48a-e4dd83f55b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.421354] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 69d8f97f-f58d-4185-95fd-05ed6a6b52d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.435923] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 33e5f591-2ed7-4302-b7be-8b800cebd5f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.464258] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f30b1ea-be87-43b7-b29f-4af6193da475 tempest-ServerDiagnosticsTest-1248428146 tempest-ServerDiagnosticsTest-1248428146-project-member] Acquiring lock "ce3ac204-fc43-465d-9d08-b378c9df6275" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.467763] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f30b1ea-be87-43b7-b29f-4af6193da475 tempest-ServerDiagnosticsTest-1248428146 tempest-ServerDiagnosticsTest-1248428146-project-member] Lock "ce3ac204-fc43-465d-9d08-b378c9df6275" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.470019] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7912b146-0eae-4cf1-a19c-8b2ae94b22f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.481626] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7eb21186-f497-4031-ad88-6b61608b1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.496868] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.509316] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf18eb94-40cd-4451-9cf9-a59679dc2231 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 727.509983] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 727.509983] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 727.733563] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d76f902c-29c2-4f20-8a90-bb3eaa825e98 tempest-ServerActionsV293TestJSON-661984579 tempest-ServerActionsV293TestJSON-661984579-project-member] Acquiring lock "4974d062-87b0-4773-a207-34b88bfe8c07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.733860] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d76f902c-29c2-4f20-8a90-bb3eaa825e98 tempest-ServerActionsV293TestJSON-661984579 tempest-ServerActionsV293TestJSON-661984579-project-member] Lock "4974d062-87b0-4773-a207-34b88bfe8c07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.974253] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b9beab-21c1-4b16-a301-52b083d29c15 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.983671] env[67424]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23567358-c0f3-4db9-91b1-347b1b5e1a62 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.020415] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffc3b11-d41d-4fd5-a8ca-17480a6a4b16 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.028239] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be85e65-3138-4320-b656-7cc7bcfa3547 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.042973] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.052397] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 728.070874] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 728.071254] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.485810] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.485810] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.485968] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 728.486068] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 728.511761] env[67424]: DEBUG nova.compute.manager [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512415] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512415] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512415] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512415] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512415] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512635] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512635] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512741] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512858] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.512979] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 728.513677] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.712834] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5615ebf-f73e-4cac-8a5b-070f25129b1d tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Acquiring lock "4093b8de-13ef-422d-a9ca-f8ae5eb3a18a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.713229] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5615ebf-f73e-4cac-8a5b-070f25129b1d tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Lock "4093b8de-13ef-422d-a9ca-f8ae5eb3a18a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.691977] env[67424]: DEBUG oslo_concurrency.lockutils [None req-255d51c4-e339-4918-9408-a17bfdf58561 tempest-ImagesNegativeTestJSON-115544103 tempest-ImagesNegativeTestJSON-115544103-project-member] Acquiring lock "fb169c4d-3537-4479-a9f3-b56513eea871" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.692534] env[67424]: DEBUG oslo_concurrency.lockutils [None req-255d51c4-e339-4918-9408-a17bfdf58561 tempest-ImagesNegativeTestJSON-115544103 tempest-ImagesNegativeTestJSON-115544103-project-member] Lock "fb169c4d-3537-4479-a9f3-b56513eea871" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.600656] env[67424]: DEBUG oslo_concurrency.lockutils [None req-87a03065-283a-4ead-9a34-91b20b347e6f tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Acquiring lock "825e6698-ee41-41da-888d-2863ea0b1973" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.600883] env[67424]: DEBUG oslo_concurrency.lockutils [None req-87a03065-283a-4ead-9a34-91b20b347e6f tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Lock "825e6698-ee41-41da-888d-2863ea0b1973" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.142991] env[67424]: DEBUG oslo_concurrency.lockutils [None req-709623f1-2783-4f89-888d-48a473e5d62a tempest-ServersTestFqdnHostnames-1756207499 tempest-ServersTestFqdnHostnames-1756207499-project-member] Acquiring lock "2849f020-1ab9-4756-84a5-3180f06df920" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.143293] env[67424]: DEBUG oslo_concurrency.lockutils [None req-709623f1-2783-4f89-888d-48a473e5d62a tempest-ServersTestFqdnHostnames-1756207499 tempest-ServersTestFqdnHostnames-1756207499-project-member] Lock "2849f020-1ab9-4756-84a5-3180f06df920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.071598] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b1927878-3cba-4f47-b9e8-87fe408679ab tempest-ServerExternalEventsTest-1360939349 tempest-ServerExternalEventsTest-1360939349-project-member] Acquiring lock "e15bbca3-2b79-4a30-bfa9-0e7648b98d96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.071916] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b1927878-3cba-4f47-b9e8-87fe408679ab tempest-ServerExternalEventsTest-1360939349 tempest-ServerExternalEventsTest-1360939349-project-member] Lock "e15bbca3-2b79-4a30-bfa9-0e7648b98d96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.606929] env[67424]: WARNING oslo_vmware.rw_handles [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 753.606929] env[67424]: ERROR oslo_vmware.rw_handles [ 753.607427] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to 
vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 753.608873] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 753.609128] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Copying Virtual Disk [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/d7bd7c23-f06a-4bed-a1d6-e37607ade308/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 753.609400] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c63229de-f505-4ee6-8103-a50b766236a3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.616836] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for the task: (returnval){ [ 753.616836] env[67424]: value = "task-3199928" [ 753.616836] env[67424]: _type = "Task" [ 753.616836] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.625301] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Task: {'id': task-3199928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.127669] env[67424]: DEBUG oslo_vmware.exceptions [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 754.127765] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.128367] env[67424]: ERROR nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.128367] env[67424]: Faults: ['InvalidArgument'] [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Traceback (most recent call last): [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] yield resources [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self.driver.spawn(context, instance, image_meta, [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self._fetch_image_if_missing(context, vi) [ 754.128367] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] image_cache(vi, tmp_image_ds_loc) [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] vm_util.copy_virtual_disk( [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] session._wait_for_task(vmdk_copy_task) [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return self.wait_for_task(task_ref) [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return evt.wait() [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] result = hub.switch() [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 754.128697] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return self.greenlet.switch() [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self.f(*self.args, **self.kw) [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] raise exceptions.translate_fault(task_info.error) [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Faults: ['InvalidArgument'] [ 754.129022] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] [ 754.129022] env[67424]: INFO nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Terminating instance [ 754.130240] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.130481] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.131648] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7101fbec-d811-4eb1-8832-4e6c5020dcf8 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.132769] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.132938] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquired lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.133122] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 754.139911] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.140099] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 754.141343] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f9e57f-70ad-4557-a0f2-5a130bb2c5f0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.149039] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 754.149039] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]523b6aff-8e60-c21e-01bc-48672e5671ee" [ 754.149039] env[67424]: _type = "Task" [ 754.149039] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.156456] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]523b6aff-8e60-c21e-01bc-48672e5671ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.167834] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.267921] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.277939] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Releasing lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.278326] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 754.278705] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 754.279775] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f5c417-5c11-4155-80ab-832b5e8a8cb6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.288988] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 754.289245] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc55403e-6c5d-4304-afb1-0595fa5d6d54 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.324415] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 754.324633] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 754.324814] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Deleting the datastore file [datastore2] eae4bf0e-4a44-4929-92f7-e4f4b6966187 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
754.325075] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28633f6e-b060-4b66-9f97-b364f80c8887 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.331217] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for the task: (returnval){ [ 754.331217] env[67424]: value = "task-3199930" [ 754.331217] env[67424]: _type = "Task" [ 754.331217] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.339200] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Task: {'id': task-3199930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.660221] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 754.660998] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.661360] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5da757d9-da72-46b7-a38a-ee60167d7951 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.673435] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.673521] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Fetch image to [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 754.673660] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 754.674428] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55433998-97d4-4756-81d5-9ffcbe06b614 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.681585] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43c6712-db3b-4ad3-81ed-25e90087615f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.691252] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d25c694-5b50-418b-a281-8e4425338e21 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.725496] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcc8340-6d99-48d8-bad0-7774245e1d64 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.730975] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd252ccb-c766-4894-ab82-599f256004b7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.751251] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 754.804933] env[67424]: DEBUG oslo_vmware.rw_handles [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 754.866514] env[67424]: DEBUG oslo_vmware.rw_handles [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 754.866757] env[67424]: DEBUG oslo_vmware.rw_handles [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 754.870446] env[67424]: DEBUG oslo_vmware.api [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Task: {'id': task-3199930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031667} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.870744] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.870966] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 754.871201] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 754.871433] env[67424]: INFO nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Took 0.59 seconds to destroy the instance on the hypervisor. [ 754.871709] env[67424]: DEBUG oslo.service.loopingcall [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.871949] env[67424]: DEBUG nova.compute.manager [-] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 754.874675] env[67424]: DEBUG nova.compute.claims [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 754.874878] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.875145] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.326875] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7ea648-fa99-4f07-8bf2-24e16676f241 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.334569] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543eb428-d5bc-46cd-b748-bc782d6c0469 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.364037] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26872242-2c37-4e76-a086-8da5bdb49ab9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.371842] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8448aa50-89a8-4e00-8abb-b29ddb383034 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.384212] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.409199] env[67424]: ERROR nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [req-712d0b4c-b21c-432f-ade4-78d887016a43] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b21acede-6243-4c82-934a-a3956380220f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-712d0b4c-b21c-432f-ade4-78d887016a43"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 755.425859] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 755.440093] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 755.440322] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.452420] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 755.470037] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 755.848425] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed4553b-2413-4993-9001-5cf036d0dbfe {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.857450] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8989db0-942e-4cce-b218-3f599c6b528f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.890352] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f4ec54-b4cc-45b5-af59-33120a5c6152 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.897068] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fe28ab-9a5b-480d-a51b-2e910d11601d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.909956] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.948279] env[67424]: DEBUG nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updated inventory for provider b21acede-6243-4c82-934a-a3956380220f with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 755.948565] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 42 to 43 during operation: update_inventory {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 755.948752] env[67424]: DEBUG nova.compute.provider_tree [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.967586] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.092s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.968179] env[67424]: ERROR nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 755.968179] env[67424]: Faults: ['InvalidArgument'] [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Traceback (most recent call last): [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self.driver.spawn(context, instance, image_meta, [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self._vmops.spawn(context, instance, image_meta, injected_files, [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self._fetch_image_if_missing(context, vi) [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] image_cache(vi, tmp_image_ds_loc) [ 755.968179] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] vm_util.copy_virtual_disk( [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] session._wait_for_task(vmdk_copy_task) [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return self.wait_for_task(task_ref) [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 
755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return evt.wait() [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] result = hub.switch() [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] return self.greenlet.switch() [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 755.968555] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] self.f(*self.args, **self.kw) [ 755.968871] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 755.968871] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] raise exceptions.translate_fault(task_info.error) [ 755.968871] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 755.968871] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Faults: ['InvalidArgument'] [ 755.968871] env[67424]: ERROR nova.compute.manager [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] [ 755.968871] env[67424]: DEBUG nova.compute.utils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 755.970414] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Build of instance eae4bf0e-4a44-4929-92f7-e4f4b6966187 was re-scheduled: A specified parameter was not correct: fileType [ 755.970414] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 755.970778] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 755.971011] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquiring lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.971189] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Acquired lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.971407] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.005541] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.117328] env[67424]: DEBUG nova.network.neutron [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.128852] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Releasing lock "refresh_cache-eae4bf0e-4a44-4929-92f7-e4f4b6966187" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.129083] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 756.129267] env[67424]: DEBUG nova.compute.manager [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] [instance: eae4bf0e-4a44-4929-92f7-e4f4b6966187] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 756.218634] env[67424]: INFO nova.scheduler.client.report [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Deleted allocations for instance eae4bf0e-4a44-4929-92f7-e4f4b6966187 [ 756.239911] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ce1f0a9c-4dec-43d6-9b20-82cd60dc419d tempest-ServersAdmin275Test-1806667437 tempest-ServersAdmin275Test-1806667437-project-member] Lock "eae4bf0e-4a44-4929-92f7-e4f4b6966187" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.126s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.284764] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 756.345266] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.345521] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.347079] env[67424]: INFO nova.compute.claims [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.765362] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8121aa-2fc1-4d0a-bec4-ecf162b74c91 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.773160] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ec725c-62e4-4a66-9d0b-1a8eeeb60b6c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.804017] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fdd890-6ad5-4f3a-b408-33cf080dcb5f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.811404] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48003c08-62b0-4965-b48c-3a1c00940b7b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.825116] env[67424]: DEBUG nova.compute.provider_tree [None 
req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.834721] env[67424]: DEBUG nova.scheduler.client.report [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 756.849200] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.504s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.849768] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 756.892458] env[67424]: DEBUG nova.compute.utils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.894134] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 756.894134] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 756.902900] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Start building block device mappings for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 756.964239] env[67424]: DEBUG nova.policy [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '573b93519e3f4e1d8e8b7332ec5f5114', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce49b0a9d8cc463981edd97634485fb3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 756.970047] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 757.002697] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.003007] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.003187] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.003384] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.003542] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 757.003677] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.003887] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.004350] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.004654] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.004873] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.005121] env[67424]: DEBUG nova.virt.hardware [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.006319] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca19acb-5ed0-40b1-a70d-088f4883df0e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.014487] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b5e472-f548-495d-8780-9ee6622266c3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.613202] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Successfully created port: 39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.795732] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Successfully updated port: 39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 758.806526] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.806718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquired lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.807062] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.906613] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.209295] env[67424]: DEBUG nova.compute.manager [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Received event network-vif-plugged-39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 759.209391] env[67424]: DEBUG oslo_concurrency.lockutils [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] Acquiring lock "de7549c2-328b-4ab2-b590-c32f8a7d3261-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.209568] env[67424]: DEBUG oslo_concurrency.lockutils [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.209789] env[67424]: DEBUG oslo_concurrency.lockutils [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.209962] env[67424]: DEBUG nova.compute.manager [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] No waiting events found dispatching network-vif-plugged-39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
759.210338] env[67424]: WARNING nova.compute.manager [req-af32c50f-5ab1-4e73-b451-98c83e13d065 req-4b06a741-7eea-4e73-a357-e494042d2187 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Received unexpected event network-vif-plugged-39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 for instance with vm_state building and task_state spawning. [ 759.370158] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Updating instance_info_cache with network_info: [{"id": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "address": "fa:16:3e:3b:6c:ae", "network": {"id": "fe3f16a8-4b90-4be4-b2eb-590e5fdbe846", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992555846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce49b0a9d8cc463981edd97634485fb3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b19813-c1", "ovs_interfaceid": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.385424] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Releasing lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.385733] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance network_info: |[{"id": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "address": "fa:16:3e:3b:6c:ae", "network": {"id": "fe3f16a8-4b90-4be4-b2eb-590e5fdbe846", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992555846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce49b0a9d8cc463981edd97634485fb3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap39b19813-c1", "ovs_interfaceid": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 759.386429] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:6c:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39b19813-c12a-4f6c-a91e-5fb2cbf5bc88', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.394674] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Creating folder: Project (ce49b0a9d8cc463981edd97634485fb3). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.395371] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1c65b9f-b749-4341-a1bf-d7278dc71372 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.406456] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Created folder: Project (ce49b0a9d8cc463981edd97634485fb3) in parent group-v639843. [ 759.406456] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Creating folder: Instances. Parent ref: group-v639877. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.406456] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b400b928-045e-497c-82a2-d8fd0f676f3a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.414694] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Created folder: Instances in parent group-v639877. [ 759.414953] env[67424]: DEBUG oslo.service.loopingcall [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.415387] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 759.415612] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdc53372-fb5f-4400-905d-9bb13d0a1722 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.434596] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.434596] env[67424]: value = "task-3199933" [ 759.434596] env[67424]: _type = "Task" [ 759.434596] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.445853] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199933, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.944688] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199933, 'name': CreateVM_Task, 'duration_secs': 0.282557} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.944948] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 759.945520] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.945680] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.945989] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 759.946246] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149fe7dd-a70f-4942-9738-572e20d67fa5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.950584] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for the task: (returnval){ [ 759.950584] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d413bc-9be1-9b5c-7e7e-2cada1b42f8a" [ 759.950584] env[67424]: _type = "Task" [ 759.950584] env[67424]: } to 
complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.957933] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d413bc-9be1-9b5c-7e7e-2cada1b42f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.461871] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.462139] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.462354] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.839873] env[67424]: WARNING oslo_vmware.rw_handles [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 760.839873] env[67424]: ERROR oslo_vmware.rw_handles [ 760.840214] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 
tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore1 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 760.841773] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 760.842031] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Copying Virtual Disk [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore1] vmware_temp/c7fe8737-9eed-494d-8441-40288af6e4dc/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 760.842314] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6b699b5-3359-4d4c-b41f-a1f430506df4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.849479] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Waiting for the task: (returnval){ [ 760.849479] env[67424]: value = "task-3199934" [ 760.849479] env[67424]: _type = "Task" [ 760.849479] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.858179] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Task: {'id': task-3199934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.286325] env[67424]: DEBUG nova.compute.manager [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Received event network-changed-39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 761.286556] env[67424]: DEBUG nova.compute.manager [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Refreshing instance network info cache due to event network-changed-39b19813-c12a-4f6c-a91e-5fb2cbf5bc88. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 761.286736] env[67424]: DEBUG oslo_concurrency.lockutils [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] Acquiring lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.286881] env[67424]: DEBUG oslo_concurrency.lockutils [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] Acquired lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.287116] env[67424]: DEBUG nova.network.neutron [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Refreshing network info cache for port 39b19813-c12a-4f6c-a91e-5fb2cbf5bc88 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.360688] env[67424]: DEBUG oslo_vmware.exceptions [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 761.361072] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.361536] env[67424]: ERROR nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 761.361536] env[67424]: Faults: ['InvalidArgument'] [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Traceback (most recent call last): [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] yield resources [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self.driver.spawn(context, instance, image_meta, [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 
0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self._fetch_image_if_missing(context, vi) [ 761.361536] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] image_cache(vi, tmp_image_ds_loc) [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] vm_util.copy_virtual_disk( [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] session._wait_for_task(vmdk_copy_task) [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return self.wait_for_task(task_ref) [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return evt.wait() [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] result = hub.switch() [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 761.361849] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return self.greenlet.switch() [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self.f(*self.args, **self.kw) [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] raise exceptions.translate_fault(task_info.error) [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Faults: ['InvalidArgument'] [ 
761.362291] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] [ 761.362291] env[67424]: INFO nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Terminating instance [ 761.364535] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 761.364732] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 761.365732] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf86cac6-1d39-44a3-8d9d-3e12c2bd7a7c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.373277] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 761.373885] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30968eed-55b2-4a16-943f-8cff3c4e6cc6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.437229] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 761.437474] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Deleting contents of the VM from datastore datastore1 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 761.437656] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Deleting the datastore file [datastore1] 0ac8dd6c-41e3-4749-9129-02688c3385cc {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 761.437926] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60020626-387f-418a-8a0b-d473aa321ccd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.444249] env[67424]: DEBUG oslo_vmware.api [None 
req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Waiting for the task: (returnval){ [ 761.444249] env[67424]: value = "task-3199936" [ 761.444249] env[67424]: _type = "Task" [ 761.444249] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.454915] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Task: {'id': task-3199936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.757242] env[67424]: DEBUG nova.network.neutron [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Updated VIF entry in instance network info cache for port 39b19813-c12a-4f6c-a91e-5fb2cbf5bc88. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 761.757577] env[67424]: DEBUG nova.network.neutron [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Updating instance_info_cache with network_info: [{"id": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "address": "fa:16:3e:3b:6c:ae", "network": {"id": "fe3f16a8-4b90-4be4-b2eb-590e5fdbe846", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-992555846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce49b0a9d8cc463981edd97634485fb3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b19813-c1", "ovs_interfaceid": "39b19813-c12a-4f6c-a91e-5fb2cbf5bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.768366] env[67424]: DEBUG oslo_concurrency.lockutils [req-1c18ddab-460a-4ff3-9d89-20365071c89d req-a05b2ab8-f801-41d8-96c5-31b2f2f0bfb3 service nova] Releasing lock "refresh_cache-de7549c2-328b-4ab2-b590-c32f8a7d3261" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.960021] env[67424]: DEBUG oslo_vmware.api [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Task: {'id': task-3199936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06472} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.960021] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.960021] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Deleted contents of the VM from datastore datastore1 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 761.960021] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 761.960021] env[67424]: INFO nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Took 0.59 seconds to destroy the instance on the hypervisor. [ 761.960564] env[67424]: DEBUG nova.compute.claims [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 761.960564] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.960564] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.504321] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db53e226-3c36-4a84-b9b0-92a70d4fbff0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.513984] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1280fa5-049a-4f2d-aa3b-9566305c7d67 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.548521] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b36bf0e-2732-4e4c-871d-01ba966d4668 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.556108] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f097aa9-e2a6-400b-af02-0ff5aae2d689 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.573660] env[67424]: DEBUG nova.compute.provider_tree [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.584227] env[67424]: DEBUG nova.scheduler.client.report [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 762.603502] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.643s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.605135] env[67424]: ERROR nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.605135] env[67424]: Faults: ['InvalidArgument'] [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Traceback (most recent call last): [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self.driver.spawn(context, instance, image_meta, [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self._fetch_image_if_missing(context, vi) [ 762.605135] 
env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] image_cache(vi, tmp_image_ds_loc) [ 762.605135] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] vm_util.copy_virtual_disk( [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] session._wait_for_task(vmdk_copy_task) [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return self.wait_for_task(task_ref) [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return evt.wait() [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] result = hub.switch() [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] return self.greenlet.switch() [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 762.605624] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] self.f(*self.args, **self.kw) [ 762.605893] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 762.605893] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] raise exceptions.translate_fault(task_info.error) [ 762.605893] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 762.605893] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Faults: ['InvalidArgument'] [ 762.605893] env[67424]: ERROR nova.compute.manager [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] [ 762.605893] env[67424]: DEBUG nova.compute.utils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 
tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 762.606716] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Build of instance 0ac8dd6c-41e3-4749-9129-02688c3385cc was re-scheduled: A specified parameter was not correct: fileType [ 762.606716] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 762.607174] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 762.607389] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 762.607668] env[67424]: DEBUG nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 762.607822] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 763.377429] env[67424]: DEBUG nova.network.neutron [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.392837] env[67424]: INFO nova.compute.manager [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] [instance: 0ac8dd6c-41e3-4749-9129-02688c3385cc] Took 0.78 seconds to deallocate network for instance. 
[ 763.517420] env[67424]: INFO nova.scheduler.client.report [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Deleted allocations for instance 0ac8dd6c-41e3-4749-9129-02688c3385cc [ 763.540603] env[67424]: DEBUG oslo_concurrency.lockutils [None req-66216d5d-3cc9-45c7-87c0-806d53c4da67 tempest-InstanceActionsNegativeTestJSON-87382798 tempest-InstanceActionsNegativeTestJSON-87382798-project-member] Lock "0ac8dd6c-41e3-4749-9129-02688c3385cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 92.501s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.556305] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 763.608350] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.608599] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.610067] env[67424]: INFO nova.compute.claims [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.120108] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f37f72-8246-4392-8d1f-e64851b74c1d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.129124] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef061492-f024-4b20-9cc2-d53266d65b1e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.158827] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e89496-fe1b-4f3b-8324-769073e25684 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.166389] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b253ce1-f250-4146-87e8-1a90d05e7854 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.180560] env[67424]: DEBUG nova.compute.provider_tree [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317
tempest-SecurityGroupsTestJSON-1873081317-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.191729] env[67424]: DEBUG nova.scheduler.client.report [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.208681] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.600s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.208922] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 764.249929] env[67424]: DEBUG nova.compute.utils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.254778] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 764.254778] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 764.260791] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 764.334282] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 764.355353] env[67424]: DEBUG nova.policy [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c29e78a3540e41d4b5b2727b5135c15c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f0913481268401bbef75bc0a08aa16a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 764.365871] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.366055] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.366219] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.366444] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.366550] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.366695] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.366904] env[67424]: DEBUG nova.virt.hardware 
[None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.367121] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.367626] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.367626] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.367626] env[67424]: DEBUG nova.virt.hardware [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.368574] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7bc034-9806-4625-850d-2adc1f1d1cd0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.377378] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3958fc91-ea30-4748-8a53-d99972368eb6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.914558] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Successfully created port: d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.308876] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Successfully updated port: d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.327494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.327494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 
tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquired lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.327494] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.411331] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.756483] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Updating instance_info_cache with network_info: [{"id": "d48c5153-9f59-4844-bca0-c707300a5451", "address": "fa:16:3e:b9:e1:7d", "network": {"id": "27c8a410-acbb-42d3-9748-2d418538fbb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1762708178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f0913481268401bbef75bc0a08aa16a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd48c5153-9f", "ovs_interfaceid": "d48c5153-9f59-4844-bca0-c707300a5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.768398] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Releasing lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.768709] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance network_info: |[{"id": "d48c5153-9f59-4844-bca0-c707300a5451", "address": "fa:16:3e:b9:e1:7d", "network": {"id": "27c8a410-acbb-42d3-9748-2d418538fbb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1762708178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f0913481268401bbef75bc0a08aa16a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd48c5153-9f", "ovs_interfaceid": "d48c5153-9f59-4844-bca0-c707300a5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 766.769175] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:e1:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd48c5153-9f59-4844-bca0-c707300a5451', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.781325] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Creating folder: Project (5f0913481268401bbef75bc0a08aa16a). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 766.782549] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e150d71-9768-422b-88c1-7918d686dd35 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.793438] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Created folder: Project (5f0913481268401bbef75bc0a08aa16a) in parent group-v639843. [ 766.793641] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Creating folder: Instances. Parent ref: group-v639880. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 766.793876] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e375aa72-82d8-4a0d-842a-c2a94e84600e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.805707] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Created folder: Instances in parent group-v639880. 
[ 766.805962] env[67424]: DEBUG oslo.service.loopingcall [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.806173] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 766.806381] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d654fe4-78b5-4b31-b3b5-82b6df40f28e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.823577] env[67424]: DEBUG nova.compute.manager [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Received event network-vif-plugged-d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 766.823696] env[67424]: DEBUG oslo_concurrency.lockutils [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] Acquiring lock "924956a0-9a91-4870-a240-6a1d7868904b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.823925] env[67424]: DEBUG oslo_concurrency.lockutils [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] Lock "924956a0-9a91-4870-a240-6a1d7868904b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.824199] env[67424]: DEBUG oslo_concurrency.lockutils [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] Lock "924956a0-9a91-4870-a240-6a1d7868904b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.824456] env[67424]: DEBUG nova.compute.manager [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] No waiting events found dispatching network-vif-plugged-d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 766.824627] env[67424]: WARNING nova.compute.manager [req-0f1f4b75-4f21-49f0-9bde-7f8e8da8932a req-de29c55a-19f4-4bcd-bd94-259c9c962c63 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Received unexpected event network-vif-plugged-d48c5153-9f59-4844-bca0-c707300a5451 for instance with vm_state building and task_state spawning. [ 766.829682] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.829682] env[67424]: value = "task-3199939" [ 766.829682] env[67424]: _type = "Task" [ 766.829682] env[67424]: } to complete.
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.838283] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199939, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.340090] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199939, 'name': CreateVM_Task, 'duration_secs': 0.370613} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.340417] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 767.340984] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.341159] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.341471] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.342048] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-559149b1-fc81-48b2-8f24-767f06446e3f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.346603] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for the task: (returnval){ [ 767.346603] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dce5c4-26f1-0372-7458-589254d16c5f" [ 767.346603] env[67424]: _type = "Task" [ 767.346603] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.357568] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dce5c4-26f1-0372-7458-589254d16c5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.857650] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.858359] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 767.858359] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.238472] env[67424]: DEBUG nova.compute.manager [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Received event network-changed-d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 769.238744] env[67424]: DEBUG nova.compute.manager [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Refreshing instance network info cache due to event network-changed-d48c5153-9f59-4844-bca0-c707300a5451. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 769.238880] env[67424]: DEBUG oslo_concurrency.lockutils [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] Acquiring lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.239114] env[67424]: DEBUG oslo_concurrency.lockutils [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] Acquired lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.239287] env[67424]: DEBUG nova.network.neutron [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Refreshing network info cache for port d48c5153-9f59-4844-bca0-c707300a5451 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 769.760018] env[67424]: DEBUG nova.network.neutron [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Updated VIF entry in instance network info cache for port d48c5153-9f59-4844-bca0-c707300a5451. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 769.760018] env[67424]: DEBUG nova.network.neutron [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Updating instance_info_cache with network_info: [{"id": "d48c5153-9f59-4844-bca0-c707300a5451", "address": "fa:16:3e:b9:e1:7d", "network": {"id": "27c8a410-acbb-42d3-9748-2d418538fbb7", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1762708178-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f0913481268401bbef75bc0a08aa16a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd48c5153-9f", "ovs_interfaceid": "d48c5153-9f59-4844-bca0-c707300a5451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.773962] env[67424]: DEBUG oslo_concurrency.lockutils [req-0dadd71e-35d1-4f7f-bd77-95369302224c req-0f3538d3-deeb-4334-b16d-a34501751718 service nova] Releasing lock "refresh_cache-924956a0-9a91-4870-a240-6a1d7868904b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.371362] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "850df4c3-3a92-47d3-973d-62f41d813f6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.371677] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.093982] env[67424]: DEBUG oslo_concurrency.lockutils [None req-597e61db-a85e-4e68-b5ce-ce601f54cbff tempest-ServersListShow296Test-1208499162 tempest-ServersListShow296Test-1208499162-project-member] Acquiring lock "a78a13fd-6763-427e-af14-a20bee858505" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.094325] env[67424]: DEBUG oslo_concurrency.lockutils [None req-597e61db-a85e-4e68-b5ce-ce601f54cbff tempest-ServersListShow296Test-1208499162 tempest-ServersListShow296Test-1208499162-project-member] Lock
"a78a13fd-6763-427e-af14-a20bee858505" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.388760] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.389070] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.389244] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 787.387254] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.387557] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.387766] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.387959] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.401135] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.401418] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.401582] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.401762] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 787.403151] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e426eb-2a26-4056-8fde-8959168f3bb2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.412831] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c2f781-7fdc-4ffd-9045-22bffc79480e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.428335] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d325c28a-21cc-4969-80b9-59029b88a20c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.435195] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de383c02-6863-4911-ae85-a679de840df6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.470563] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181016MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 787.470755] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.470999] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.569739] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.569905] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ef935349-cb7c-4aaa-a735-a010501c5ed4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570055] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570185] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570303] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570432] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570582] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570639] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570745] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.570914] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 787.584826] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.596850] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.609253] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7d7fcc13-5e0b-421f-80a9-f7f37afa51b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.619732] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aeddb8eb-4ca6-4e91-a140-05badd9e685e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.629880] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance acf30dc7-38e0-486a-a54a-c6ce56ce1c57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.643163] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e7454c0-a7b8-418e-90ab-f2ce85125b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.655379] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance beb3c53f-a0ee-435e-9f95-c6bf0d68b872 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.665460] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.675634] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 39331dec-cd53-4cab-b48a-e4dd83f55b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.686499] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 69d8f97f-f58d-4185-95fd-05ed6a6b52d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.696600] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 33e5f591-2ed7-4302-b7be-8b800cebd5f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.707087] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7912b146-0eae-4cf1-a19c-8b2ae94b22f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.716976] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7eb21186-f497-4031-ad88-6b61608b1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.727860] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.739069] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf18eb94-40cd-4451-9cf9-a59679dc2231 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.749417] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ce3ac204-fc43-465d-9d08-b378c9df6275 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.758910] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4974d062-87b0-4773-a207-34b88bfe8c07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.768617] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4093b8de-13ef-422d-a9ca-f8ae5eb3a18a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.778656] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fb169c4d-3537-4479-a9f3-b56513eea871 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.788335] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 825e6698-ee41-41da-888d-2863ea0b1973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.797761] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2849f020-1ab9-4756-84a5-3180f06df920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.807325] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance e15bbca3-2b79-4a30-bfa9-0e7648b98d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.819028] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.828850] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a78a13fd-6763-427e-af14-a20bee858505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 787.829148] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 787.829302] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 788.180668] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb65ace-8de8-4029-bb1a-4429212ce2f5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.188322] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67709579-90a0-4ce0-a170-82e00975eca4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.218048] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4722c97c-a136-4ec8-b322-457e6b90d76b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.224820] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dd3736-a802-4f32-997b-1376338a2afe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.237598] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.246271] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 788.263208] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 788.263442] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.792s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.259588] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.259885] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 789.259989] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 789.260112] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 789.282074] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282276] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282387] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282521] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282644] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282763] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.282884] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.283013] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.283208] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.283294] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 789.283501] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 789.284118] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.425083] env[67424]: WARNING oslo_vmware.rw_handles [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 802.425083] env[67424]: ERROR oslo_vmware.rw_handles [ 802.425717] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 802.427526] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 802.427718] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Copying Virtual Disk [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/fef2a5f5-2cab-4d0e-bef6-80da063d1886/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 802.428011] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75275fb0-cff3-4498-9be6-40a91b9a3d98 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.436545] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 802.436545] env[67424]: value = "task-3199940" [ 802.436545] env[67424]: _type = "Task" [ 802.436545] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.444754] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199940, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.947404] env[67424]: DEBUG oslo_vmware.exceptions [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 802.947820] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.950829] env[67424]: ERROR nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 802.950829] env[67424]: Faults: ['InvalidArgument'] [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Traceback (most recent call last): [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] yield resources [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self.driver.spawn(context, instance, image_meta, [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: 
ef935349-cb7c-4aaa-a735-a010501c5ed4] self._fetch_image_if_missing(context, vi) [ 802.950829] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] image_cache(vi, tmp_image_ds_loc) [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] vm_util.copy_virtual_disk( [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] session._wait_for_task(vmdk_copy_task) [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return self.wait_for_task(task_ref) [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return evt.wait() [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] result = hub.switch() [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 802.951277] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return self.greenlet.switch() [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self.f(*self.args, **self.kw) [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] raise exceptions.translate_fault(task_info.error) [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Faults: ['InvalidArgument'] [ 802.952857] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] [ 802.952857] env[67424]: INFO nova.compute.manager [None 
req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Terminating instance [ 802.952857] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.953154] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.953154] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a107e120-9a81-4afb-a72b-eba178de0b18 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.953571] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 802.953769] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 802.955215] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6165c964-7310-4eac-8da0-414d8b53c5db {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.962791] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 802.963847] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b8dc99a-d091-4618-b6fa-843cc0e472f5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.965305] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.965466] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 802.966152] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f0e2e46-bc17-47c9-ad46-afe26cfa419b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.971787] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for the task: (returnval){ [ 802.971787] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52def447-07d2-b2aa-93ae-34bdf785ca9d" [ 802.971787] env[67424]: _type = "Task" [ 802.971787] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.979206] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52def447-07d2-b2aa-93ae-34bdf785ca9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.028013] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 803.028013] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 803.028013] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleting the datastore file [datastore2] ef935349-cb7c-4aaa-a735-a010501c5ed4 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.028013] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f60ad9f-d00f-4e34-9ec2-d1ac6bb524e2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.034864] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 803.034864] env[67424]: value = "task-3199942" [ 803.034864] env[67424]: _type = "Task" [ 803.034864] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.043096] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199942, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.482176] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 803.482460] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Creating directory with path [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.482675] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dfbff93-ea39-434b-9071-5de8ff582f2a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.493963] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Created directory with path [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.494168] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Fetch image to [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 803.494341] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 803.495061] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b27e563-d178-432a-b4c0-a59ea15a5353 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.501345] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7e7e34-b472-4aae-b7f0-40f904e5f4d7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.510139] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e82af99-f4e6-47a2-9bfc-fbfebd10c8be {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.544894] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9483532-ab95-46bb-8739-0e704f2e5a11 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.551875] env[67424]: DEBUG oslo_vmware.api [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065213} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.553316] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.553514] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 803.553683] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 803.553853] env[67424]: INFO nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Took 0.60 seconds to destroy the instance on the hypervisor. 
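The two vCenter task records above (CopyVirtualDisk_Task failing with InvalidArgument, DeleteDatastoreFile_Task completing with duration_secs 0.065213) both go through oslo.vmware's wait_for_task machinery: the driver submits a task, then a looping call polls it until it reports success or the fault is translated and raised (hence the earlier "Fault InvalidArgument not matched." entry before the VimFaultException surfaced). A minimal sketch of that polling loop, assuming a hypothetical get_task_info callable in place of the real suds/vim session plumbing:

    import time

    def poll_vcenter_task(get_task_info, task_ref, interval=0.5):
        # Sketch of the wait_for_task/_poll_task pattern visible in this log;
        # the real oslo.vmware implementation drives this from an eventlet-based
        # looping call (see the loopingcall.py frames in the traceback above).
        while True:
            info = get_task_info(task_ref)   # hypothetical helper returning a dict
            if info['state'] == 'success':
                return info.get('result')    # completion is logged with duration_secs
            if info['state'] == 'error':
                # oslo.vmware first tries to match a specific fault class before
                # raising a translated exception to the caller.
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)             # each poll logs "progress is N%"

Each iteration corresponds to one "Task: {'id': task-..., ...} progress is 0%." line in the trace; the error branch is the path that produced the VimFaultException tracebacks above.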
[ 803.556025] env[67424]: DEBUG nova.compute.claims [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 803.556114] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.556253] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.558752] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-25f16d02-9a81-46b9-bf1f-13e20a88049f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.580962] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 803.640217] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 803.702801] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 803.703022] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 804.040131] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631090ab-0784-4114-9f7a-cfa775fa1466 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.047480] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e92a2a5-a8b9-40c0-bc2f-01ba3a43a1af {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.081967] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425d3672-11a9-4ccc-8a3e-92740797ae70 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.089196] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c493d08d-7cab-411e-882a-157d0e71a8a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.102163] env[67424]: DEBUG nova.compute.provider_tree [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.114075] env[67424]: DEBUG nova.scheduler.client.report [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 804.128931] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.573s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.129522] env[67424]: ERROR nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 804.129522] env[67424]: Faults: ['InvalidArgument'] [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Traceback (most recent call last): [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 804.129522] 
env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self.driver.spawn(context, instance, image_meta, [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self._fetch_image_if_missing(context, vi) [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] image_cache(vi, tmp_image_ds_loc) [ 804.129522] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] vm_util.copy_virtual_disk( [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] session._wait_for_task(vmdk_copy_task) [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return self.wait_for_task(task_ref) [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return evt.wait() [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] result = hub.switch() [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] return self.greenlet.switch() [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 804.129814] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] self.f(*self.args, **self.kw) [ 804.130158] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 804.130158] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] raise exceptions.translate_fault(task_info.error) [ 804.130158] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 804.130158] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Faults: ['InvalidArgument'] [ 804.130158] env[67424]: ERROR nova.compute.manager [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] [ 804.130286] env[67424]: DEBUG nova.compute.utils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 804.131744] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Build of instance ef935349-cb7c-4aaa-a735-a010501c5ed4 was re-scheduled: A specified parameter was not correct: fileType [ 804.131744] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 804.132170] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 804.132612] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 804.132612] env[67424]: DEBUG nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 804.132787] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.607642] env[67424]: DEBUG nova.network.neutron [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.620994] env[67424]: INFO nova.compute.manager [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: ef935349-cb7c-4aaa-a735-a010501c5ed4] Took 0.49 seconds to deallocate network for instance. [ 804.730571] env[67424]: INFO nova.scheduler.client.report [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted allocations for instance ef935349-cb7c-4aaa-a735-a010501c5ed4 [ 804.751763] env[67424]: DEBUG oslo_concurrency.lockutils [None req-660f09fc-b30e-4a52-92e2-3a9a36faafee tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "ef935349-cb7c-4aaa-a735-a010501c5ed4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.076s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.767517] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 804.825932] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.825932] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.827982] env[67424]: INFO nova.compute.claims [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.266878] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2943d3f2-1f65-41a4-b248-376755162c27 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.274496] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fe1b52-b9ef-418b-b17a-d6c3f549be07 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.303876] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d583aad-6ff3-4cb2-8080-dc020b33d7c8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.311048] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c2a8f0-149d-48e5-93d9-885abdf3d91e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.325503] env[67424]: DEBUG nova.compute.provider_tree [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.333843] env[67424]: DEBUG nova.scheduler.client.report [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 805.353057] 
env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.527s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.353558] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 805.389764] env[67424]: DEBUG nova.compute.utils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.391279] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 805.391481] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 805.400403] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 805.470440] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 805.479115] env[67424]: DEBUG nova.policy [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b9b1cd10cc34b50bc268fed0f95e324', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b0a9896a0bf44158642d13b39e7d3b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 805.500649] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.500882] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.501047] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.501305] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.501457] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.501602] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 805.501808] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.501965] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.502171] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.502351] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.502524] env[67424]: DEBUG nova.virt.hardware [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.503468] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764f3df6-88a6-43d6-b9bd-bfedd2125577 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.511691] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac2659a-b0f6-40fc-ad6d-02e988f789d0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.230469] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Successfully created port: 5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.387728] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Successfully updated port: 5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.398652] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.398785] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquired lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.398932] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 807.466149] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.607155] env[67424]: DEBUG nova.compute.manager [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Received event network-vif-plugged-5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 807.607463] env[67424]: DEBUG oslo_concurrency.lockutils [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] Acquiring lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.607622] env[67424]: DEBUG oslo_concurrency.lockutils [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.607784] env[67424]: DEBUG oslo_concurrency.lockutils [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.607950] env[67424]: DEBUG nova.compute.manager [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] No waiting events found dispatching network-vif-plugged-5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 807.608261] env[67424]: WARNING nova.compute.manager [req-0bd04c39-cdc3-4fe2-b748-f8bfc19e2eb0 req-053b01f7-c822-43d6-8b5d-8a7db895d547 service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Received unexpected event network-vif-plugged-5bfe5ed9-4cf8-428d-973b-55247355336f for instance with vm_state building and task_state spawning. 
[ 807.762607] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Updating instance_info_cache with network_info: [{"id": "5bfe5ed9-4cf8-428d-973b-55247355336f", "address": "fa:16:3e:63:c6:1b", "network": {"id": "e046a61f-f2a2-4d12-a6f1-be660da84d98", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-763562468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b0a9896a0bf44158642d13b39e7d3b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bfe5ed9-4c", "ovs_interfaceid": "5bfe5ed9-4cf8-428d-973b-55247355336f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.777979] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Releasing lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.778293] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance network_info: |[{"id": "5bfe5ed9-4cf8-428d-973b-55247355336f", "address": "fa:16:3e:63:c6:1b", "network": {"id": "e046a61f-f2a2-4d12-a6f1-be660da84d98", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-763562468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b0a9896a0bf44158642d13b39e7d3b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bfe5ed9-4c", "ovs_interfaceid": "5bfe5ed9-4cf8-428d-973b-55247355336f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
807.778702] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:c6:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41f66e20-fd86-4158-bbdc-7a150e85e844', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bfe5ed9-4cf8-428d-973b-55247355336f', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.789805] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Creating folder: Project (8b0a9896a0bf44158642d13b39e7d3b7). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.790376] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c72268e-2b44-4a20-85ea-7704e7a52d87 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.801110] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Created folder: Project (8b0a9896a0bf44158642d13b39e7d3b7) in parent group-v639843. [ 807.801336] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Creating folder: Instances. Parent ref: group-v639883. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.801574] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86492888-8445-45d0-b5d9-8c9e93112069 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.810617] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Created folder: Instances in parent group-v639883. [ 807.810836] env[67424]: DEBUG oslo.service.loopingcall [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.811025] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 807.811245] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a7973cd-cd11-444e-ba82-ce5851a78da9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.832456] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.832456] env[67424]: value = "task-3199945" [ 807.832456] env[67424]: _type = "Task" [ 807.832456] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.841237] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199945, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.345980] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199945, 'name': CreateVM_Task, 'duration_secs': 0.330299} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.346252] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 808.347043] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.347270] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.347594] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 808.347856] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6ce7d0b-8314-4262-be7d-ee001e4c6294 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.352699] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for the task: (returnval){ [ 808.352699] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52e98eab-9015-79b4-2055-212655e1a241" [ 808.352699] env[67424]: _type = "Task" [ 808.352699] env[67424]: } to 
complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.367700] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.367937] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.368170] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.645261] env[67424]: DEBUG nova.compute.manager [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Received event network-changed-5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 809.645494] env[67424]: DEBUG nova.compute.manager [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Refreshing instance network info cache due to event network-changed-5bfe5ed9-4cf8-428d-973b-55247355336f. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 809.645729] env[67424]: DEBUG oslo_concurrency.lockutils [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] Acquiring lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.645895] env[67424]: DEBUG oslo_concurrency.lockutils [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] Acquired lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.646046] env[67424]: DEBUG nova.network.neutron [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Refreshing network info cache for port 5bfe5ed9-4cf8-428d-973b-55247355336f {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 810.064649] env[67424]: DEBUG nova.network.neutron [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Updated VIF entry in instance network info cache for port 5bfe5ed9-4cf8-428d-973b-55247355336f. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 810.064998] env[67424]: DEBUG nova.network.neutron [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Updating instance_info_cache with network_info: [{"id": "5bfe5ed9-4cf8-428d-973b-55247355336f", "address": "fa:16:3e:63:c6:1b", "network": {"id": "e046a61f-f2a2-4d12-a6f1-be660da84d98", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-763562468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b0a9896a0bf44158642d13b39e7d3b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41f66e20-fd86-4158-bbdc-7a150e85e844", "external-id": "nsx-vlan-transportzone-182", "segmentation_id": 182, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bfe5ed9-4c", "ovs_interfaceid": "5bfe5ed9-4cf8-428d-973b-55247355336f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.075393] env[67424]: DEBUG oslo_concurrency.lockutils [req-830b57ae-c226-4bf0-918c-e3a0e8bdc315 req-c08e10b6-44e4-4bfa-8824-784e586cd6cc service nova] Releasing lock "refresh_cache-2b6fd570-3691-4d29-8351-6c0d2fdb8e01" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.388961] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.388961] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.389535] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.389535] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 848.384218] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.386783] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.386968] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.387806] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.388108] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 849.388146] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 849.408590] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.408730] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.408860] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409229] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409229] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409229] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409449] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409449] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409566] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409741] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 849.409882] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 849.410401] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.421193] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.421406] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.421569] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.421719] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 849.422981] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b6323d-642e-4ab7-b88b-a5142fc5d2e3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.431979] env[67424]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba655526-598e-4101-b484-dcf9fc8e010c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.446501] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5d8c91-5956-4819-9c41-e100a1b4376c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.452788] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc46268-6873-4ea1-8e88-db24620e320f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.483383] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 849.483549] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.483708] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.555677] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.555841] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.555972] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556117] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556237] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556354] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556469] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556675] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556733] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.556834] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 849.569050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.579749] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7d7fcc13-5e0b-421f-80a9-f7f37afa51b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.589766] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aeddb8eb-4ca6-4e91-a140-05badd9e685e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.599637] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance acf30dc7-38e0-486a-a54a-c6ce56ce1c57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.608987] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e7454c0-a7b8-418e-90ab-f2ce85125b64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.618318] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance beb3c53f-a0ee-435e-9f95-c6bf0d68b872 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.627530] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.636903] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 39331dec-cd53-4cab-b48a-e4dd83f55b9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.645702] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 69d8f97f-f58d-4185-95fd-05ed6a6b52d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.654715] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 33e5f591-2ed7-4302-b7be-8b800cebd5f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.663774] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7912b146-0eae-4cf1-a19c-8b2ae94b22f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.672851] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7eb21186-f497-4031-ad88-6b61608b1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.681567] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.691251] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf18eb94-40cd-4451-9cf9-a59679dc2231 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.702347] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ce3ac204-fc43-465d-9d08-b378c9df6275 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.712647] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4974d062-87b0-4773-a207-34b88bfe8c07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.722813] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4093b8de-13ef-422d-a9ca-f8ae5eb3a18a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.733401] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fb169c4d-3537-4479-a9f3-b56513eea871 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.743108] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 825e6698-ee41-41da-888d-2863ea0b1973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.753577] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2849f020-1ab9-4756-84a5-3180f06df920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.763335] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance e15bbca3-2b79-4a30-bfa9-0e7648b98d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.773047] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.782551] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a78a13fd-6763-427e-af14-a20bee858505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 849.782796] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 849.782987] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 850.166228] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d0b5da-6492-4072-861b-1c9214740d62 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.174037] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4431ce09-5e23-45e1-8b6e-9a24056b2c92 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.203417] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411dfcb4-de92-4681-bc79-43d0e63c0b18 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.210658] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd283b36-7ff7-4463-b7bd-8a0e96a5ac89 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.223618] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.232951] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 850.247635] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 850.247635] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.764s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.224587] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.383367] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.440059] env[67424]: WARNING oslo_vmware.rw_handles [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 852.440059] env[67424]: ERROR oslo_vmware.rw_handles [ 852.440666] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 852.442489] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 852.442742] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Copying Virtual Disk [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/6ffcfefa-c12e-46c9-b8f8-86e276e55caa/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 852.443292] env[67424]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7cd687c7-6a83-4a4f-be1d-55aeff865331 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.450566] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for the task: (returnval){ [ 852.450566] env[67424]: value = "task-3199946" [ 852.450566] env[67424]: _type = "Task" [ 852.450566] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.459738] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Task: {'id': task-3199946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.960936] env[67424]: DEBUG oslo_vmware.exceptions [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 852.961251] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.961794] env[67424]: ERROR nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.961794] env[67424]: Faults: ['InvalidArgument'] [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Traceback (most recent call last): [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] yield resources [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self.driver.spawn(context, instance, image_meta, [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 852.961794] env[67424]: ERROR 
nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self._fetch_image_if_missing(context, vi) [ 852.961794] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] image_cache(vi, tmp_image_ds_loc) [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] vm_util.copy_virtual_disk( [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] session._wait_for_task(vmdk_copy_task) [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return self.wait_for_task(task_ref) [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return evt.wait() [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] result = hub.switch() [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 852.962206] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return self.greenlet.switch() [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self.f(*self.args, **self.kw) [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] raise exceptions.translate_fault(task_info.error) [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Faults: ['InvalidArgument'] [ 852.962551] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] [ 852.962551] env[67424]: INFO 
nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Terminating instance [ 852.963719] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.963919] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.964170] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e30a8dd-1730-4508-8399-9c0624084c7b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.966369] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 852.966555] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 852.967267] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1459b3-1702-4a8a-a8b5-56f335ce7e86 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.973890] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 852.974112] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c359235e-cd20-42b5-a170-17c4b7bea980 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.976157] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.976336] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 852.977251] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f72da3-236e-43c9-82f6-e1dcd7fa3da9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.981898] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 852.981898] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b32abd-1022-08fc-44e2-72d34ce92188" [ 852.981898] env[67424]: _type = "Task" [ 852.981898] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.990240] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b32abd-1022-08fc-44e2-72d34ce92188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.043311] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 853.043528] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 853.043732] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Deleting the datastore file [datastore2] f9097bb5-5320-49e6-9c9a-6397a176a5a3 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.044088] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8848c3e-84c6-4e2d-bb2d-d0e745284414 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.051268] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for the task: (returnval){ [ 853.051268] env[67424]: value = "task-3199948" [ 853.051268] env[67424]: _type = "Task" [ 853.051268] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.059185] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Task: {'id': task-3199948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.458673] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.492632] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 853.492856] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating directory with path [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.493135] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf65ae66-6d14-4d3b-a2fd-6bb7cef9f7e9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.504162] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created directory with path [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.504386] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Fetch image to [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 853.504508] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 853.505228] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c8f8c1-7970-4f33-91a8-806d07a70d4e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.511887] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa148e0-b3f6-445b-86ba-ecd94b126d78 {{(pid=67424) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.521196] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fd255d-045b-4cb9-957a-63e962b636fd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.555446] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64efdd0e-2aaf-4369-b4be-595d75c49d12 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.562328] env[67424]: DEBUG oslo_vmware.api [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Task: {'id': task-3199948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068207} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.563779] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.563976] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 853.564201] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 853.564347] env[67424]: INFO nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Took 0.60 seconds to destroy the instance on the hypervisor. 
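--- editor's note ----------------------------------------------------------
The DeleteDatastoreFile_Task entries above, like the failed
CopyVirtualDisk_Task before them, follow oslo.vmware's standard
invoke-then-poll flow: a *_Task method is invoked through the session, the
returned task reference is polled (the "progress is 0%" lines), and a task
error is translated into an exception such as the VimFaultException with
Faults: ['InvalidArgument'] seen earlier. A minimal sketch of that flow,
assuming a reachable vCenter; the credentials and datastore paths are
placeholders, and only the oslo_vmware calls themselves are real:

    from oslo_vmware import api, exceptions

    # Hypothetical session; host/user/password are illustrative.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    vim = session.vim
    try:
        # Start the server-side disk copy; returns a Task moref.
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName='[datastore2] vmware_temp/src/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/dst/image.vmdk')
        # wait_for_task() polls task_info (the "progress is 0%" lines)
        # and raises a translated fault when task_info.error is set.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # The path taken above: e.fault_list == ['InvalidArgument'].
        print('copy failed:', e.fault_list, e.msg)
----------------------------------------------------------------------------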
[ 853.566379] env[67424]: DEBUG nova.compute.claims [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 853.566547] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.566750] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.569232] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dc49c9e4-ef1d-436b-9ecb-f879fdcdcb4e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.591165] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 853.652423] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 853.714813] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 853.715018] env[67424]: DEBUG oslo_vmware.rw_handles [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 854.041281] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c40e5c1-679e-49f9-b4d2-9a29908e5892 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.048766] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3a6ef3-3641-4386-b121-747c907e6c7e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.077644] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515bdc29-dec7-4e24-9342-b32202334ee1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.084802] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ed137a-ba14-40e6-95d3-74d5c9944d58 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.098724] env[67424]: DEBUG nova.compute.provider_tree [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.107248] env[67424]: DEBUG nova.scheduler.client.report [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 854.123398] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.557s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.123937] env[67424]: ERROR nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 854.123937] env[67424]: Faults: ['InvalidArgument'] [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Traceback (most recent call last): [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: 
f9097bb5-5320-49e6-9c9a-6397a176a5a3] self.driver.spawn(context, instance, image_meta, [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self._fetch_image_if_missing(context, vi) [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] image_cache(vi, tmp_image_ds_loc) [ 854.123937] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] vm_util.copy_virtual_disk( [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] session._wait_for_task(vmdk_copy_task) [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return self.wait_for_task(task_ref) [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return evt.wait() [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] result = hub.switch() [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] return self.greenlet.switch() [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 854.124376] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] self.f(*self.args, **self.kw) [ 854.124730] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 854.124730] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] raise exceptions.translate_fault(task_info.error) [ 854.124730] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 854.124730] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Faults: ['InvalidArgument'] [ 854.124730] env[67424]: ERROR nova.compute.manager [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] [ 854.124730] env[67424]: DEBUG nova.compute.utils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 854.129732] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Build of instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 was re-scheduled: A specified parameter was not correct: fileType [ 854.129732] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 854.130160] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 854.130342] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 854.130515] env[67424]: DEBUG nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 854.130673] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.698669] env[67424]: DEBUG nova.network.neutron [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.711254] env[67424]: INFO nova.compute.manager [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Took 0.58 seconds to deallocate network for instance. [ 854.816328] env[67424]: INFO nova.scheduler.client.report [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Deleted allocations for instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 [ 854.842246] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7e58d256-2c5e-478d-b6e8-4334bd91f26f tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.900s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.844435] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 1.385s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.844435] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Acquiring lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.844435] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.844435] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.849730] env[67424]: INFO nova.compute.manager [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Terminating instance [ 854.851503] env[67424]: DEBUG nova.compute.manager [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 854.851688] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.851968] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1943cb1d-24a5-454e-9e1c-8a347cfc95c7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.860748] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf723903-6eab-4b86-aff1-c3e5385dd453 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.872461] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 854.896845] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9097bb5-5320-49e6-9c9a-6397a176a5a3 could not be found. [ 854.897088] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.897289] env[67424]: INFO nova.compute.manager [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Took 0.05 seconds to destroy the instance on the hypervisor.
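--- editor's note ----------------------------------------------------------
The Acquiring/acquired/released lines bracketing do_terminate_instance above
come from oslo.concurrency's lockutils, which logs how long each caller
waited for a named in-process semaphore and how long it was held (e.g. the
instance lock above was held 200.900s across the whole rescheduled build).
A minimal sketch of the two usual forms, assuming non-external (in-process)
locks; the function name is illustrative:

    from oslo_concurrency import lockutils

    # Decorator form: lockutils emits the 'Acquiring lock "..." by "..."',
    # 'acquired ... waited Ns' and 'released ... held Ns' DEBUG lines
    # around each call, as in the entries above.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # critical section: one caller per process at a time

    # Context-manager form of the same named lock.
    with lockutils.lock('compute_resources'):
        pass
----------------------------------------------------------------------------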
[ 854.897529] env[67424]: DEBUG oslo.service.loopingcall [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.897765] env[67424]: DEBUG nova.compute.manager [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 854.897856] env[67424]: DEBUG nova.network.neutron [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.934951] env[67424]: DEBUG nova.network.neutron [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.946549] env[67424]: INFO nova.compute.manager [-] [instance: f9097bb5-5320-49e6-9c9a-6397a176a5a3] Took 0.05 seconds to deallocate network for instance. [ 854.947817] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.948666] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.951674] env[67424]: INFO nova.compute.claims [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.084757] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8db89016-b3e8-478a-9c89-e4d96875532a tempest-ServerTagsTestJSON-2075723877 tempest-ServerTagsTestJSON-2075723877-project-member] Lock "f9097bb5-5320-49e6-9c9a-6397a176a5a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.241s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.400145] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bbf1f1-e455-412d-a9da-f06dc5ededbc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.408991] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a641f5-53f8-4e4d-b5b6-ffcb7beeef7e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.443762] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-8271a3c4-017d-4496-9e50-f2b9c8f2b297 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.452503] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373118bc-bedb-4f0b-a043-81c85de12af7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.470450] env[67424]: DEBUG nova.compute.provider_tree [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.480027] env[67424]: DEBUG nova.scheduler.client.report [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 855.500203] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.551s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.500740] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 855.549019] env[67424]: DEBUG nova.compute.utils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.549019] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Allocating IP information in the background. 
{{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 855.549019] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 855.569088] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 855.633442] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 855.659552] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.659849] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.660015] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.660226] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.660391] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.660540] env[67424]: DEBUG nova.virt.hardware [None 
req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.660751] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.660913] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.661222] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.661457] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.661702] env[67424]: DEBUG nova.virt.hardware [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.662637] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ade9c7-58eb-4f72-9144-12eeb5806d15 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.672057] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22229add-a3fc-43aa-86fd-b7d032c2ce2f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.699246] env[67424]: DEBUG nova.policy [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a474bae5e5ab47d8b15733dccb09f8fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '80afca15dea54a2796d8e587ec971c72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.804795] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 
0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Successfully created port: 5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.859164] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.833855] env[67424]: DEBUG oslo_concurrency.lockutils [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.342966] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Successfully updated port: 5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.358014] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.358014] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquired lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.358014] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.511178] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance cache missing network info. 
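[annotation] The Acquiring/Acquired/released triplets that recur throughout this log (here for "refresh_cache-0f2c77b5-...") are emitted by oslo.concurrency's lockutils whenever a named lock is taken. A minimal sketch of the same pattern, with the lock name copied from the log and a stubbed body:

```python
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # lockutils.lock() logs messages like "Acquiring lock ..." /
    # "Acquired lock ..." on entry and "Releasing lock ..." on exit,
    # which is where the triplets around the refresh_cache lines above
    # come from.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here

refresh_network_cache('0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf')
```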
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.548616] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "8556fc83-206e-4e50-bd54-4185132497a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.010011] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Updating instance_info_cache with network_info: [{"id": "5499db91-38eb-4861-86df-670de25cad3e", "address": "fa:16:3e:b9:1b:4e", "network": {"id": "954821de-7ef5-4426-a2ea-5c5e1ae7aec3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1672981683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "80afca15dea54a2796d8e587ec971c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5499db91-38", "ovs_interfaceid": "5499db91-38eb-4861-86df-670de25cad3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.017180] env[67424]: DEBUG nova.compute.manager [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Received event network-vif-plugged-5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 859.017643] env[67424]: DEBUG oslo_concurrency.lockutils [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] Acquiring lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.017643] env[67424]: DEBUG oslo_concurrency.lockutils [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.017867] env[67424]: DEBUG oslo_concurrency.lockutils [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.018011] env[67424]: DEBUG nova.compute.manager [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] No waiting events found dispatching network-vif-plugged-5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.018894] env[67424]: WARNING nova.compute.manager [req-17f942ae-2e40-4e80-abc2-3c2c14bfca28 req-65932716-f38b-44ad-9403-955d75ce1b76 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Received unexpected event network-vif-plugged-5499db91-38eb-4861-86df-670de25cad3e for instance with vm_state building and task_state spawning. [ 859.027765] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Releasing lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.028107] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance network_info: |[{"id": "5499db91-38eb-4861-86df-670de25cad3e", "address": "fa:16:3e:b9:1b:4e", "network": {"id": "954821de-7ef5-4426-a2ea-5c5e1ae7aec3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1672981683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "80afca15dea54a2796d8e587ec971c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5499db91-38", "ovs_interfaceid": "5499db91-38eb-4861-86df-670de25cad3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 859.028800] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:1b:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '163e60bd-32d6-41c5-95e6-2eb10c5c9245', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5499db91-38eb-4861-86df-670de25cad3e', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.037343] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 
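[annotation] The network-vif-plugged handling just above shows both sides of Nova's external-event rendezvous: Neutron reports the plug, pop_instance_event looks for a registered waiter, and since the instance is still building with no one waiting, the event is logged as unexpected. A toy model of that dispatch (a hypothetical class, not Nova's InstanceEvents):

```python
import threading

class InstanceEvents:
    """Toy version of the pop_instance_event flow logged above: a spawner
    registers interest in an event name and the external callback
    completes it; with no registered waiter, the event is the
    'unexpected' case the WARNING reports."""
    def __init__(self):
        self._waiters = {}
        self._lock = threading.Lock()

    def prepare(self, name):
        with self._lock:
            evt = self._waiters[name] = threading.Event()
        return evt  # caller blocks on evt.wait()

    def pop(self, name):
        with self._lock:
            evt = self._waiters.pop(name, None)
        if evt is None:
            print('Received unexpected event %s' % name)  # cf. the WARNING
        else:
            evt.set()

events = InstanceEvents()
events.pop('network-vif-plugged-5499db91-38eb-4861-86df-670de25cad3e')
# -> Received unexpected event ... (no waiter registered, as in the log)
```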
tempest-ServersAdminTestJSON-1008777526-project-member] Creating folder: Project (80afca15dea54a2796d8e587ec971c72). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 859.038613] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-153d8ada-4086-4aef-88a0-0683b72117e3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.049177] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Created folder: Project (80afca15dea54a2796d8e587ec971c72) in parent group-v639843. [ 859.049406] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Creating folder: Instances. Parent ref: group-v639886. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 859.049665] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54ec15fe-4779-4e07-9adb-dfa69f10db21 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.058813] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Created folder: Instances in parent group-v639886. [ 859.059197] env[67424]: DEBUG oslo.service.loopingcall [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.059425] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 859.059653] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7178e49-4d91-4e39-a8b4-7f53ad658308 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.080273] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.080273] env[67424]: value = "task-3199951" [ 859.080273] env[67424]: _type = "Task" [ 859.080273] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.089324] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199951, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.590873] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199951, 'name': CreateVM_Task, 'duration_secs': 0.333787} completed successfully. 
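[annotation] The CreateVM_Task lines show the standard oslo.vmware task lifecycle: invoke the API, get back a task reference, then poll it until a terminal state (here 0% -> completed in 0.33s). A generic polling loop in the same spirit; get_task_info is a hypothetical callable standing in for the property read that _poll_task performs:

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5):
    """Illustrative polling loop in the spirit of the wait_for_task /
    _poll_task entries above. get_task_info is assumed to return an
    object with .state, .progress and .error."""
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        # 'queued' / 'running': report progress and keep polling,
        # as in the "progress is 0%" lines.
        print('progress is %s%%' % (info.progress or 0))
        time.sleep(poll_interval)
```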
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.591097] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 859.591786] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.591990] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.592361] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.592659] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d801d4f4-2c53-4112-ad18-24c690388fab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.597373] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for the task: (returnval){ [ 859.597373] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b5b61d-4d28-81a7-b9bd-f33b2c18b285" [ 859.597373] env[67424]: _type = "Task" [ 859.597373] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.605291] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b5b61d-4d28-81a7-b9bd-f33b2c18b285, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.107690] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.107952] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.108185] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.261818] env[67424]: DEBUG nova.compute.manager [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Received event network-changed-5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 861.262110] env[67424]: DEBUG nova.compute.manager [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Refreshing instance network info cache due to event network-changed-5499db91-38eb-4861-86df-670de25cad3e. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 861.262306] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] Acquiring lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.262394] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] Acquired lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.262558] env[67424]: DEBUG nova.network.neutron [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Refreshing network info cache for port 5499db91-38eb-4861-86df-670de25cad3e {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 861.721147] env[67424]: DEBUG nova.network.neutron [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Updated VIF entry in instance network info cache for port 5499db91-38eb-4861-86df-670de25cad3e. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 861.721388] env[67424]: DEBUG nova.network.neutron [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Updating instance_info_cache with network_info: [{"id": "5499db91-38eb-4861-86df-670de25cad3e", "address": "fa:16:3e:b9:1b:4e", "network": {"id": "954821de-7ef5-4426-a2ea-5c5e1ae7aec3", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1672981683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "80afca15dea54a2796d8e587ec971c72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "163e60bd-32d6-41c5-95e6-2eb10c5c9245", "external-id": "nsx-vlan-transportzone-716", "segmentation_id": 716, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5499db91-38", "ovs_interfaceid": "5499db91-38eb-4861-86df-670de25cad3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.730540] env[67424]: DEBUG oslo_concurrency.lockutils [req-0ef8d43f-a10d-4c5b-937c-ecaa3cebfd85 req-6bc4ea01-8fcb-4a85-9524-15d118330ce9 service nova] Releasing lock "refresh_cache-0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.464517] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.752217] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.910925] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.597655] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "924956a0-9a91-4870-a240-6a1d7868904b" by 
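[annotation] The network_info payload cached above is plain JSON-like data; everything Nova later needs (MAC, fixed IP, the NSX switch id for the VIF) is a dict walk away. A small sketch pulling the fixed IPs out of entries shaped like the ones in the cache-update lines, with the sample trimmed to the fields the helper touches:

```python
def fixed_ips(network_info):
    """Yield (mac, fixed_ip) pairs from a network_info list shaped like
    the cache entries above (sketch; field names taken from the log)."""
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                if ip['type'] == 'fixed':
                    yield vif['address'], ip['address']

vif = {"id": "5499db91-38eb-4861-86df-670de25cad3e",
       "address": "fa:16:3e:b9:1b:4e",
       "network": {"subnets": [{"ips": [{"address": "192.168.128.8",
                                         "type": "fixed"}]}]}}
print(list(fixed_ips([vif])))
# [('fa:16:3e:b9:1b:4e', '192.168.128.8')]
```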
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.984338] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.921017] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.822463] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.822814] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.455494] env[67424]: WARNING oslo_vmware.rw_handles [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 902.455494] env[67424]: ERROR oslo_vmware.rw_handles [ 902.455494] 
env[67424]: DEBUG nova.virt.vmwareapi.images [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 902.457321] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 902.457760] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Copying Virtual Disk [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/56fbf314-9d0a-4b7c-92ad-bd0e59a731cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 902.457836] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-288ac919-1b7c-42f1-8765-141b2c453a6d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.465625] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 902.465625] env[67424]: value = "task-3199952" [ 902.465625] env[67424]: _type = "Task" [ 902.465625] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.473311] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3199952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.976904] env[67424]: DEBUG oslo_vmware.exceptions [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 902.977255] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.977905] env[67424]: ERROR nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.977905] env[67424]: Faults: ['InvalidArgument'] [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Traceback (most recent call last): [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] yield resources [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self.driver.spawn(context, instance, image_meta, [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self._fetch_image_if_missing(context, vi) [ 902.977905] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] image_cache(vi, tmp_image_ds_loc) [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] vm_util.copy_virtual_disk( [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] session._wait_for_task(vmdk_copy_task) [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return self.wait_for_task(task_ref) [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return evt.wait() [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] result = hub.switch() [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 902.978296] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return self.greenlet.switch() [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self.f(*self.args, **self.kw) [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] raise exceptions.translate_fault(task_info.error) [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Faults: ['InvalidArgument'] [ 902.978697] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] [ 902.978697] env[67424]: INFO nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Terminating instance [ 902.979835] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.979927] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.980952] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee 
tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 902.981163] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 902.981392] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f0a07b7-2196-469f-a2ff-20fa8ab4f746 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.983887] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f378b2ca-1023-47d5-aa23-97e13a224e5a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.991414] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 902.991414] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e36d4f69-441c-4f4c-bb9b-4c1350981991 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.993889] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.994078] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 902.995055] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d8c5600-af18-4711-b4c2-d1f2f96cbc35 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.999942] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for the task: (returnval){ [ 902.999942] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b6306a-cf4f-08b1-2763-0191f113a9bc" [ 902.999942] env[67424]: _type = "Task" [ 902.999942] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.009496] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b6306a-cf4f-08b1-2763-0191f113a9bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.057047] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 903.057047] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 903.057047] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleting the datastore file [datastore2] dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.057047] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e263a368-1c84-4a6d-9cbd-cedbc9971398 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.062521] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 903.062521] env[67424]: value = "task-3199954" [ 903.062521] env[67424]: _type = "Task" [ 903.062521] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.070183] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3199954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.144424] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.144424] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.512735] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 903.513075] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Creating directory with path [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.513805] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bae24ba1-ec4a-420d-9c88-be0973596bb7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.530263] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Created directory with path [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.530489] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Fetch image to [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 903.530684] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file 
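[annotation] The Creating/Created directory pairs here and at 902.98x are ds_util.mkdir calls; since concurrent builds share devstack-image-cache_base, _create_folder_if_missing has to treat an already-existing folder as success. The pattern, sketched with a hypothetical mkdir callable standing in for the FileManager.MakeDirectory invocation:

```python
from oslo_vmware import exceptions as vexc

def create_folder_if_missing(mkdir):
    """Make datastore directory creation idempotent, as the
    _create_folder_if_missing entries above do for the shared image
    cache directory. 'mkdir' is a hypothetical zero-argument callable
    that issues the actual MakeDirectory request."""
    try:
        mkdir()
    except vexc.FileAlreadyExistsException:
        # Another worker won the race; the directory already exists,
        # which is fine for a shared cache.
        pass
```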
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 903.531499] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b21bf7-9da3-4c05-a930-41623cb47a25 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.545137] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ebbf97-a63c-4824-b515-cd30d2e92389 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.554816] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1890925-9790-4023-9c50-b91b239df539 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.591484] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d52a92-662f-4e5c-9985-421e98b3c114 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.598895] env[67424]: DEBUG oslo_vmware.api [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3199954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079936} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.600401] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.600595] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 903.600772] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 903.600943] env[67424]: INFO nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 903.602776] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f973af35-2f3f-45d3-aa37-6a8cacac7c32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.604744] env[67424]: DEBUG nova.compute.claims [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 903.604902] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.605129] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.628404] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 903.705113] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 903.777060] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 903.777060] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
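[annotation] The write handle created above streams the 21,318,656-byte sparse vmdk to the ESX host's /folder endpoint, an HTTP PUT with the datacenter path and datastore name as query parameters. A very rough sketch of that transfer; host, path and cookie are placeholders, and oslo_vmware.rw_handles layers session cookies, chunking and the error handling seen earlier on top of a connection like this:

```python
import http.client

def upload_to_datastore(host, path, data, cookie):
    # e.g. path = ('/folder/vmware_temp/.../tmp-sparse.vmdk'
    #              '?dcPath=ha-datacenter&dsName=datastore2')
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest('PUT', path)
    conn.putheader('Cookie', cookie)
    conn.putheader('Content-Length', str(len(data)))
    conn.endheaders()
    conn.send(data)
    resp = conn.getresponse()
    resp.read()
    conn.close()
    return resp.status
```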
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 904.138016] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774ffcba-23e7-49fd-8764-97e4f4c29357 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.146374] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86435982-2549-4d07-9d00-92d96749517b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.177142] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8db70f4-c7c7-4e8a-9a35-74fd585134a1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.184555] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cb8fa0-b948-4fab-8fb7-7c422dea88b2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.197381] env[67424]: DEBUG nova.compute.provider_tree [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.208336] env[67424]: DEBUG nova.scheduler.client.report [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 904.220766] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.615s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.220988] env[67424]: ERROR nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.220988] env[67424]: Faults: ['InvalidArgument'] [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Traceback (most recent call last): [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 
904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self.driver.spawn(context, instance, image_meta, [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self._fetch_image_if_missing(context, vi) [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] image_cache(vi, tmp_image_ds_loc) [ 904.220988] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] vm_util.copy_virtual_disk( [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] session._wait_for_task(vmdk_copy_task) [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return self.wait_for_task(task_ref) [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return evt.wait() [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] result = hub.switch() [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] return self.greenlet.switch() [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 904.222047] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] self.f(*self.args, **self.kw) [ 904.222347] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 904.222347] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] raise exceptions.translate_fault(task_info.error) [ 904.222347] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.222347] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Faults: ['InvalidArgument'] [ 904.222347] env[67424]: ERROR nova.compute.manager [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] [ 904.222347] env[67424]: DEBUG nova.compute.utils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 904.225711] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Build of instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 was re-scheduled: A specified parameter was not correct: fileType [ 904.225711] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 904.226210] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 904.226439] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 904.226647] env[67424]: DEBUG nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 904.226882] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.731411] env[67424]: DEBUG nova.network.neutron [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.749961] env[67424]: INFO nova.compute.manager [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Took 0.52 seconds to deallocate network for instance. [ 904.855555] env[67424]: INFO nova.scheduler.client.report [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleted allocations for instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 [ 904.883218] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ffd3a5c5-c8e3-4464-a73e-16f2cd6f7bee tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 247.811s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.883503] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 48.024s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.883748] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.883940] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.884121] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.888137] env[67424]: INFO nova.compute.manager [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Terminating instance [ 904.892847] env[67424]: DEBUG nova.compute.manager [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 904.893074] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 904.893348] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2879d98-8627-449e-84bd-8ef6c1711a8b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.903493] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1600c556-d45d-4568-a4df-db6e1b527db1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.918293] env[67424]: DEBUG nova.compute.manager [None req-bb9143fd-06e6-4e11-b0ef-234bf8a362e2 tempest-ImagesOneServerTestJSON-558811522 tempest-ImagesOneServerTestJSON-558811522-project-member] [instance: 7d7fcc13-5e0b-421f-80a9-f7f37afa51b0] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 904.941344] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dc7be619-c2a8-4d65-8534-0dc8c8bf2f80 could not be found. 
[ 904.941344] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 904.941344] env[67424]: INFO nova.compute.manager [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Took 0.05 seconds to destroy the instance on the hypervisor. [ 904.941583] env[67424]: DEBUG oslo.service.loopingcall [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.941986] env[67424]: DEBUG nova.compute.manager [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 904.941986] env[67424]: DEBUG nova.network.neutron [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.948528] env[67424]: DEBUG nova.compute.manager [None req-bb9143fd-06e6-4e11-b0ef-234bf8a362e2 tempest-ImagesOneServerTestJSON-558811522 tempest-ImagesOneServerTestJSON-558811522-project-member] [instance: 7d7fcc13-5e0b-421f-80a9-f7f37afa51b0] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 904.982372] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bb9143fd-06e6-4e11-b0ef-234bf8a362e2 tempest-ImagesOneServerTestJSON-558811522 tempest-ImagesOneServerTestJSON-558811522-project-member] Lock "7d7fcc13-5e0b-421f-80a9-f7f37afa51b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.498s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.991683] env[67424]: DEBUG nova.compute.manager [None req-8ca89d28-6600-4fef-839c-a4f296ce107d tempest-ServersWithSpecificFlavorTestJSON-306453774 tempest-ServersWithSpecificFlavorTestJSON-306453774-project-member] [instance: aeddb8eb-4ca6-4e91-a140-05badd9e685e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 905.020014] env[67424]: DEBUG nova.compute.manager [None req-8ca89d28-6600-4fef-839c-a4f296ce107d tempest-ServersWithSpecificFlavorTestJSON-306453774 tempest-ServersWithSpecificFlavorTestJSON-306453774-project-member] [instance: aeddb8eb-4ca6-4e91-a140-05badd9e685e] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 905.044054] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ca89d28-6600-4fef-839c-a4f296ce107d tempest-ServersWithSpecificFlavorTestJSON-306453774 tempest-ServersWithSpecificFlavorTestJSON-306453774-project-member] Lock "aeddb8eb-4ca6-4e91-a140-05badd9e685e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.754s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.053361] env[67424]: DEBUG nova.compute.manager [None req-0ec8da25-2b62-4ef7-acbd-a7ac0a0746ae tempest-AttachInterfacesV270Test-325504290 tempest-AttachInterfacesV270Test-325504290-project-member] [instance: acf30dc7-38e0-486a-a54a-c6ce56ce1c57] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 905.076341] env[67424]: DEBUG nova.compute.manager [None req-0ec8da25-2b62-4ef7-acbd-a7ac0a0746ae tempest-AttachInterfacesV270Test-325504290 tempest-AttachInterfacesV270Test-325504290-project-member] [instance: acf30dc7-38e0-486a-a54a-c6ce56ce1c57] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 905.099473] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0ec8da25-2b62-4ef7-acbd-a7ac0a0746ae tempest-AttachInterfacesV270Test-325504290 tempest-AttachInterfacesV270Test-325504290-project-member] Lock "acf30dc7-38e0-486a-a54a-c6ce56ce1c57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.413s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.108616] env[67424]: DEBUG nova.compute.manager [None req-ed11387f-d442-4b14-96ac-30c45cd5a615 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 7e7454c0-a7b8-418e-90ab-f2ce85125b64] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 905.131455] env[67424]: DEBUG nova.compute.manager [None req-ed11387f-d442-4b14-96ac-30c45cd5a615 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 7e7454c0-a7b8-418e-90ab-f2ce85125b64] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 905.153112] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ed11387f-d442-4b14-96ac-30c45cd5a615 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "7e7454c0-a7b8-418e-90ab-f2ce85125b64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.718s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.158930] env[67424]: DEBUG nova.network.neutron [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.167866] env[67424]: DEBUG nova.compute.manager [None req-86b5b295-3f42-402b-ac62-7cb7c3758097 tempest-FloatingIPsAssociationTestJSON-1992332586 tempest-FloatingIPsAssociationTestJSON-1992332586-project-member] [instance: beb3c53f-a0ee-435e-9f95-c6bf0d68b872] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 905.174159] env[67424]: INFO nova.compute.manager [-] [instance: dc7be619-c2a8-4d65-8534-0dc8c8bf2f80] Took 0.23 seconds to deallocate network for instance. [ 905.197358] env[67424]: DEBUG nova.compute.manager [None req-86b5b295-3f42-402b-ac62-7cb7c3758097 tempest-FloatingIPsAssociationTestJSON-1992332586 tempest-FloatingIPsAssociationTestJSON-1992332586-project-member] [instance: beb3c53f-a0ee-435e-9f95-c6bf0d68b872] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 905.220627] env[67424]: DEBUG oslo_concurrency.lockutils [None req-86b5b295-3f42-402b-ac62-7cb7c3758097 tempest-FloatingIPsAssociationTestJSON-1992332586 tempest-FloatingIPsAssociationTestJSON-1992332586-project-member] Lock "beb3c53f-a0ee-435e-9f95-c6bf0d68b872" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.650s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.231884] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 905.291333] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dcc085a-7c55-44f3-b61e-79088c99253a tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "dc7be619-c2a8-4d65-8534-0dc8c8bf2f80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.408s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.298036] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.298036] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.300204] env[67424]: INFO nova.compute.claims [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.794887] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbe1d33-c5fb-4a23-b847-2261df316bad {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.805528] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d59260bc-f087-4ba1-9997-e74883756b99 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.837145] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5add2c24-8612-40f5-acc3-c4fb1e31d891 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.845015] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053b1ef-52aa-4d01-ba3c-77f100da16b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.857855] env[67424]: DEBUG nova.compute.provider_tree [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.866793] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 905.886764] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.589s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.887203] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 905.958432] env[67424]: DEBUG nova.compute.utils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.959926] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Allocating IP information in the background. 
{{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 905.960753] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.981807] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 906.061658] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 906.091989] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.093978] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.093978] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.093978] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.093978] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 906.093978] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.095821] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.095821] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.095821] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.095821] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.095821] env[67424]: DEBUG nova.virt.hardware [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.095970] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a3486a-9dd0-482a-8b99-f1b123a7c845 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.099677] env[67424]: DEBUG nova.policy [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3dad9fd609184974bc53c92bf0a4adbf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58e9a3038e904f61b17d2c6a3795f427', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 906.107847] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc2dfca-9f00-44cd-9dc8-7b0a62fd491d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.388012] 
env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.388258] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 906.396419] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.396638] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.409716] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 906.409974] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.410138] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 906.423471] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.751984] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Successfully created port: 6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.431090] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.431394] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 908.090336] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Successfully updated port: 6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.105901] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.105901] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquired lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.105901] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.152855] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.388096] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.476405] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Updating instance_info_cache with network_info: [{"id": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "address": "fa:16:3e:21:03:e0", "network": {"id": "3cf690da-b3db-4d8f-9df2-53ee6be5f7e5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-557561506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58e9a3038e904f61b17d2c6a3795f427", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af19825-35", "ovs_interfaceid": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.491464] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Releasing lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.491768] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance network_info: |[{"id": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "address": "fa:16:3e:21:03:e0", "network": {"id": "3cf690da-b3db-4d8f-9df2-53ee6be5f7e5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-557561506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58e9a3038e904f61b17d2c6a3795f427", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af19825-35", "ovs_interfaceid": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 908.493048] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:03:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6af19825-35f6-4cad-a2a3-2ebc30258c8f', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 908.500553] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Creating folder: Project (58e9a3038e904f61b17d2c6a3795f427). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 908.503060] env[67424]: DEBUG nova.compute.manager [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Received event network-vif-plugged-6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 908.503060] env[67424]: DEBUG oslo_concurrency.lockutils [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] Acquiring lock "a7d131b6-3584-48c3-acce-d553c145a837-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.503060] env[67424]: DEBUG oslo_concurrency.lockutils [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] Lock "a7d131b6-3584-48c3-acce-d553c145a837-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.503060] env[67424]: DEBUG oslo_concurrency.lockutils [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] Lock "a7d131b6-3584-48c3-acce-d553c145a837-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.503220] env[67424]: DEBUG nova.compute.manager [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] No waiting events found dispatching network-vif-plugged-6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 908.503220] env[67424]: WARNING nova.compute.manager [req-509e3421-af9c-491c-82c2-237fa51a7543 req-24c59bd8-e389-44f6-80dd-8a3966407818 service nova] [instance: 
a7d131b6-3584-48c3-acce-d553c145a837] Received unexpected event network-vif-plugged-6af19825-35f6-4cad-a2a3-2ebc30258c8f for instance with vm_state building and task_state spawning. [ 908.503287] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c721c84f-f33c-4c41-af00-59e850ae63cc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.513923] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Created folder: Project (58e9a3038e904f61b17d2c6a3795f427) in parent group-v639843. [ 908.514122] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Creating folder: Instances. Parent ref: group-v639889. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 908.514341] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2593da61-5013-42bb-8070-f86e0e825e40 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.522325] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Created folder: Instances in parent group-v639889. [ 908.522550] env[67424]: DEBUG oslo.service.loopingcall [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.522727] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 908.522954] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29d15427-1eac-4c78-b8aa-9d81d56ca3b7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.541263] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 908.541263] env[67424]: value = "task-3199957" [ 908.541263] env[67424]: _type = "Task" [ 908.541263] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.548591] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199957, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.050330] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199957, 'name': CreateVM_Task, 'duration_secs': 0.288683} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.050500] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 909.051210] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.051386] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.051707] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 909.051953] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e0345fd-456d-4e8a-b047-a40619d9f725 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.056145] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for the task: (returnval){ [ 909.056145] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]527f6251-57a5-6b70-1b2c-fe68e1f18c57" [ 909.056145] env[67424]: _type = "Task" [ 909.056145] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.063652] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]527f6251-57a5-6b70-1b2c-fe68e1f18c57, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.139778] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "a7d131b6-3584-48c3-acce-d553c145a837" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.383185] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.387764] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.388602] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.388778] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.566905] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.567200] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.567414] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.162570] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cb44eba5-e7db-4acd-97a7-d40713e7eac6 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "72480e41-88d7-4986-86fd-7d98aa82196b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.162889] env[67424]: DEBUG oslo_concurrency.lockutils 
[None req-cb44eba5-e7db-4acd-97a7-d40713e7eac6 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "72480e41-88d7-4986-86fd-7d98aa82196b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.388666] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.919139] env[67424]: DEBUG nova.compute.manager [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Received event network-changed-6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 910.919139] env[67424]: DEBUG nova.compute.manager [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Refreshing instance network info cache due to event network-changed-6af19825-35f6-4cad-a2a3-2ebc30258c8f. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 910.919139] env[67424]: DEBUG oslo_concurrency.lockutils [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] Acquiring lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.919139] env[67424]: DEBUG oslo_concurrency.lockutils [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] Acquired lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.919139] env[67424]: DEBUG nova.network.neutron [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Refreshing network info cache for port 6af19825-35f6-4cad-a2a3-2ebc30258c8f {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 911.389075] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.389075] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 911.389075] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 911.414206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414412] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414412] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414412] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414412] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414412] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 911.414573] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 911.415078] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.429703] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.429996] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.430231] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.430390] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 911.431852] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bf42d3-8a9f-4857-8a73-c0728cc27e27 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.442142] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f397cf-e3ba-44d4-8693-21ec77e5bc89 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.457779] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7014e1-69b5-4b6c-b0d9-04a486a4198c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.464706] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07453316-9670-4b61-be9c-ba8b4386c594 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.497379] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180968MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 911.497561] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.497799] 
env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.633820] env[67424]: DEBUG nova.network.neutron [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Updated VIF entry in instance network info cache for port 6af19825-35f6-4cad-a2a3-2ebc30258c8f. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 911.634184] env[67424]: DEBUG nova.network.neutron [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Updating instance_info_cache with network_info: [{"id": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "address": "fa:16:3e:21:03:e0", "network": {"id": "3cf690da-b3db-4d8f-9df2-53ee6be5f7e5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-557561506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58e9a3038e904f61b17d2c6a3795f427", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af19825-35", "ovs_interfaceid": "6af19825-35f6-4cad-a2a3-2ebc30258c8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.653601] env[67424]: DEBUG oslo_concurrency.lockutils [req-b55914d6-e253-4164-8ab7-22808f03d7bd req-e2d2e261-0c15-49f3-8034-0408f150dc22 service nova] Releasing lock "refresh_cache-a7d131b6-3584-48c3-acce-d553c145a837" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.659488] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.659667] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.659830] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.659972] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660134] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660291] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660424] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660578] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660707] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.660854] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 911.676732] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 33e5f591-2ed7-4302-b7be-8b800cebd5f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.692158] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7912b146-0eae-4cf1-a19c-8b2ae94b22f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.703795] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7eb21186-f497-4031-ad88-6b61608b1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.714504] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.725876] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf18eb94-40cd-4451-9cf9-a59679dc2231 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.736347] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ce3ac204-fc43-465d-9d08-b378c9df6275 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.750150] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4974d062-87b0-4773-a207-34b88bfe8c07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.761845] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4093b8de-13ef-422d-a9ca-f8ae5eb3a18a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.775247] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fb169c4d-3537-4479-a9f3-b56513eea871 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.793022] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 825e6698-ee41-41da-888d-2863ea0b1973 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.803635] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2849f020-1ab9-4756-84a5-3180f06df920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.815354] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance e15bbca3-2b79-4a30-bfa9-0e7648b98d96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.825520] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.837727] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a78a13fd-6763-427e-af14-a20bee858505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.850196] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.865363] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.877948] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.889698] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72480e41-88d7-4986-86fd-7d98aa82196b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 911.889994] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 911.890184] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 912.315756] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b704a8-0467-40ca-ae29-f1227a077ebb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.323398] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807c8314-704d-4e90-82a1-02c4dd6b14ad {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.356260] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdce153e-48c2-418f-9864-3004e87af27f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.363997] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be532f5b-f557-4f15-85e5-8b2742000e25 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.383434] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.389052] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 912.404143] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 912.404338] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.907s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.957557] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-f366d4dc-d7ce-4bf2-aad5-ce7f6843ba49 tempest-ServerRescueTestJSONUnderV235-1011947938 tempest-ServerRescueTestJSONUnderV235-1011947938-project-member] Acquiring lock "54dc90d0-4f6a-4b16-b1af-dc8c74aef382" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.957920] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f366d4dc-d7ce-4bf2-aad5-ce7f6843ba49 tempest-ServerRescueTestJSONUnderV235-1011947938 tempest-ServerRescueTestJSONUnderV235-1011947938-project-member] Lock "54dc90d0-4f6a-4b16-b1af-dc8c74aef382" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.066026] env[67424]: DEBUG oslo_concurrency.lockutils [None req-42b670cb-a699-4194-9ee6-20d80a9d0b11 tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Acquiring lock "3ede76b6-042e-496f-aea7-a1c42166827f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.066319] env[67424]: DEBUG oslo_concurrency.lockutils [None req-42b670cb-a699-4194-9ee6-20d80a9d0b11 tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Lock "3ede76b6-042e-496f-aea7-a1c42166827f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.838747] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ad2c225-49f3-4b9e-b565-7b9d88281994 tempest-FloatingIPsAssociationNegativeTestJSON-122126100 tempest-FloatingIPsAssociationNegativeTestJSON-122126100-project-member] Acquiring lock "614a4c23-3aee-4dd4-9ca7-534584122c00" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.839038] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ad2c225-49f3-4b9e-b565-7b9d88281994 tempest-FloatingIPsAssociationNegativeTestJSON-122126100 tempest-FloatingIPsAssociationNegativeTestJSON-122126100-project-member] Lock "614a4c23-3aee-4dd4-9ca7-534584122c00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.097089] env[67424]: DEBUG oslo_concurrency.lockutils [None req-45c38e4d-7aa2-4c86-aa60-b3c0741808b6 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Acquiring lock "c0b7de50-17ad-4f8e-9887-345ca08e9d33" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.097984] env[67424]: DEBUG oslo_concurrency.lockutils [None req-45c38e4d-7aa2-4c86-aa60-b3c0741808b6 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "c0b7de50-17ad-4f8e-9887-345ca08e9d33" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.404631] env[67424]: DEBUG oslo_concurrency.lockutils [None req-72673e12-b998-4ea2-8c84-88bb7ed7a516 tempest-ServerPasswordTestJSON-1344184811 tempest-ServerPasswordTestJSON-1344184811-project-member] Acquiring lock "38255062-f950-4b14-90e1-96a30ce2843e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.404631] env[67424]: DEBUG oslo_concurrency.lockutils [None req-72673e12-b998-4ea2-8c84-88bb7ed7a516 tempest-ServerPasswordTestJSON-1344184811 tempest-ServerPasswordTestJSON-1344184811-project-member] Lock "38255062-f950-4b14-90e1-96a30ce2843e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.365886] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0aeb2f04-8dca-4bd1-839a-58b32d3acbb6 tempest-ServerRescueTestJSON-528481978 tempest-ServerRescueTestJSON-528481978-project-member] Acquiring lock "1e5007de-23df-4c65-9210-f460ed564216" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.366195] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0aeb2f04-8dca-4bd1-839a-58b32d3acbb6 tempest-ServerRescueTestJSON-528481978 tempest-ServerRescueTestJSON-528481978-project-member] Lock "1e5007de-23df-4c65-9210-f460ed564216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.463035] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0fa9ed84-8ebf-4c02-920f-9e7f5f5f19ef tempest-ServerActionsTestOtherB-1172401824 tempest-ServerActionsTestOtherB-1172401824-project-member] Acquiring lock "cfef8a4c-abc4-4003-a932-e2f823c84e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.463438] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0fa9ed84-8ebf-4c02-920f-9e7f5f5f19ef tempest-ServerActionsTestOtherB-1172401824 tempest-ServerActionsTestOtherB-1172401824-project-member] Lock "cfef8a4c-abc4-4003-a932-e2f823c84e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.911480] env[67424]: WARNING oslo_vmware.rw_handles [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 950.911480] env[67424]: ERROR oslo_vmware.rw_handles [ 950.912185] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 950.913592] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 950.913842] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Copying Virtual Disk [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/96358927-95a3-4a12-845b-343a4843b3ec/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 950.914138] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3d74ee4-ff9e-4ed5-b1d9-4a7ab249ea58 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.921259] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for the task: (returnval){ [ 950.921259] env[67424]: value = "task-3199963" [ 950.921259] env[67424]: _type = "Task" [ 950.921259] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.929441] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Task: {'id': task-3199963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.432031] env[67424]: DEBUG oslo_vmware.exceptions [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 951.432442] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.433017] env[67424]: ERROR nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 951.433017] env[67424]: Faults: ['InvalidArgument'] [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Traceback (most recent call last): [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] yield resources [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self.driver.spawn(context, instance, image_meta, [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self._fetch_image_if_missing(context, vi) [ 951.433017] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] image_cache(vi, tmp_image_ds_loc) [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] vm_util.copy_virtual_disk( [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] session._wait_for_task(vmdk_copy_task) [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return self.wait_for_task(task_ref) [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return evt.wait() [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] result = hub.switch() [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 951.433372] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return self.greenlet.switch() [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self.f(*self.args, **self.kw) [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] raise exceptions.translate_fault(task_info.error) [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Faults: ['InvalidArgument'] [ 951.433777] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] [ 951.433777] env[67424]: INFO nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Terminating instance [ 951.435099] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.435211] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 
tempest-MigrationsAdminTest-2110344523-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.435841] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 951.437518] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 951.437518] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f700861b-37d7-43f4-8dd6-6bbbe7839f31 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.438762] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1769eca4-223d-444a-8ee5-cc2e8381f88e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.446456] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 951.446743] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-198edea6-bd57-4280-b44a-4ece4f956263 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.449386] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.449563] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 951.450554] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b22c8193-a250-47c7-91c6-4b5627e6fad6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.455424] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for the task: (returnval){ [ 951.455424] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52bbc9d1-1b38-0e5b-0f3a-a2b097d557d9" [ 951.455424] env[67424]: _type = "Task" [ 951.455424] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.463709] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52bbc9d1-1b38-0e5b-0f3a-a2b097d557d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.967215] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 951.967542] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Creating directory with path [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.967792] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6481972d-45f8-4c50-95f2-ca055043b043 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.990447] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Created directory with path [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.990652] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Fetch image to [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 951.990821] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 951.991624] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc738bb-eefc-4c24-9504-b01031a6e2cc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.998926] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9db441-82fd-41a9-9687-5878508662e6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.008260] env[67424]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88299c9-9d32-415d-943d-e3724c4e1dfb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.040291] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1240620-f63e-4989-8c42-e59497119e0d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.046127] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e7cc4743-32c2-42b2-9c67-7b38a05c9842 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.060260] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 952.060570] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 952.060781] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Deleting the datastore file [datastore2] 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.061062] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afd2c0d0-13b4-4236-a52a-f6c3574ed830 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.066928] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for the task: (returnval){ [ 952.066928] env[67424]: value = "task-3199965" [ 952.066928] env[67424]: _type = "Task" [ 952.066928] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.068556] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 952.077843] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Task: {'id': task-3199965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.125325] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 952.187373] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 952.187614] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 952.579182] env[67424]: DEBUG oslo_vmware.api [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Task: {'id': task-3199965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070363} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.579499] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.579691] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 952.579862] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 952.580048] env[67424]: INFO nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 952.582181] env[67424]: DEBUG nova.compute.claims [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 952.582365] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.582612] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.904188] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e204e0a5-5459-41e2-8ece-ff1a7a97667b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.912753] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a89b093-fdb5-4e19-9613-41aa1635f9d5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.949295] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f603a1bb-e282-42d2-b4d5-fd7919dd2bc2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.959192] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab37651-3461-4f50-9984-949bffc54ff2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.979294] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.015548] env[67424]: ERROR nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [req-baad5725-7919-4d00-b918-5da3cb03d4a8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b21acede-6243-4c82-934a-a3956380220f. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-baad5725-7919-4d00-b918-5da3cb03d4a8"}]} [ 953.037902] env[67424]: DEBUG nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 953.069856] env[67424]: DEBUG nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 953.069856] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.093026] env[67424]: DEBUG nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 953.124168] env[67424]: DEBUG nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 953.394919] env[67424]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557effb2-8c72-4182-b8fe-913ea550a864 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.403146] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd5679b-e213-4e82-959b-a634dee67e71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.432995] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7901abad-dc38-460e-999f-91c195b95ff8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.441140] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec20cc90-e944-439f-a1a4-736ecc0c9f1b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.455344] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.491277] env[67424]: DEBUG nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updated inventory for provider b21acede-6243-4c82-934a-a3956380220f with generation 56 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 953.491547] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 56 to 57 during operation: update_inventory {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 953.491728] env[67424]: DEBUG nova.compute.provider_tree [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.507434] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.925s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.507983] env[67424]: ERROR nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.507983] env[67424]: Faults: ['InvalidArgument'] [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Traceback (most recent call last): [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self.driver.spawn(context, instance, image_meta, [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self._fetch_image_if_missing(context, vi) [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] image_cache(vi, tmp_image_ds_loc) [ 953.507983] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] vm_util.copy_virtual_disk( [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] session._wait_for_task(vmdk_copy_task) [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return self.wait_for_task(task_ref) [ 
953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return evt.wait() [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] result = hub.switch() [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] return self.greenlet.switch() [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 953.508353] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] self.f(*self.args, **self.kw) [ 953.508694] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 953.508694] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] raise exceptions.translate_fault(task_info.error) [ 953.508694] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.508694] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Faults: ['InvalidArgument'] [ 953.508694] env[67424]: ERROR nova.compute.manager [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] [ 953.508694] env[67424]: DEBUG nova.compute.utils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 953.510178] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Build of instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb was re-scheduled: A specified parameter was not correct: fileType [ 953.510178] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 953.510545] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 953.510716] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 
tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 953.510869] env[67424]: DEBUG nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 953.511040] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.166048] env[67424]: DEBUG nova.network.neutron [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.182442] env[67424]: INFO nova.compute.manager [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Took 0.67 seconds to deallocate network for instance. [ 954.280753] env[67424]: INFO nova.scheduler.client.report [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Deleted allocations for instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb [ 954.306436] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7d93a08a-3924-4376-8b53-75aa44db08c3 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 296.528s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.308399] env[67424]: DEBUG oslo_concurrency.lockutils [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 96.474s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.308399] env[67424]: DEBUG oslo_concurrency.lockutils [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Acquiring lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.308399] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.308642] env[67424]: DEBUG oslo_concurrency.lockutils [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.310958] env[67424]: INFO nova.compute.manager [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Terminating instance [ 954.312716] env[67424]: DEBUG nova.compute.manager [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 954.312825] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 954.313261] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0df77719-c700-40ed-bec0-65435b48ab1f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.318419] env[67424]: DEBUG nova.compute.manager [None req-4f1324e8-6c3e-45e8-9063-9fdae3dfaa4d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 39331dec-cd53-4cab-b48a-e4dd83f55b9a] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.326024] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fbff5b-c703-4a37-91b8-df4fce9dbbe5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.343745] env[67424]: DEBUG nova.compute.manager [None req-4f1324e8-6c3e-45e8-9063-9fdae3dfaa4d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 39331dec-cd53-4cab-b48a-e4dd83f55b9a] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.357674] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb could not be found. [ 954.357918] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.358077] env[67424]: INFO nova.compute.manager [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 954.358329] env[67424]: DEBUG oslo.service.loopingcall [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.358581] env[67424]: DEBUG nova.compute.manager [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 954.358731] env[67424]: DEBUG nova.network.neutron [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.377893] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4f1324e8-6c3e-45e8-9063-9fdae3dfaa4d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "39331dec-cd53-4cab-b48a-e4dd83f55b9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.229s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.388491] env[67424]: DEBUG nova.network.neutron [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.392152] env[67424]: DEBUG nova.compute.manager [None req-4e1aab40-c4a5-4b90-a164-314fe6d3dbb8 tempest-ImagesOneServerNegativeTestJSON-96875994 tempest-ImagesOneServerNegativeTestJSON-96875994-project-member] [instance: 69d8f97f-f58d-4185-95fd-05ed6a6b52d8] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.399489] env[67424]: INFO nova.compute.manager [-] [instance: 5689b9d0-a0e8-4461-a0e8-1c9cc32190cb] Took 0.04 seconds to deallocate network for instance. 
[ 954.416767] env[67424]: DEBUG nova.compute.manager [None req-4e1aab40-c4a5-4b90-a164-314fe6d3dbb8 tempest-ImagesOneServerNegativeTestJSON-96875994 tempest-ImagesOneServerNegativeTestJSON-96875994-project-member] [instance: 69d8f97f-f58d-4185-95fd-05ed6a6b52d8] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.437719] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e1aab40-c4a5-4b90-a164-314fe6d3dbb8 tempest-ImagesOneServerNegativeTestJSON-96875994 tempest-ImagesOneServerNegativeTestJSON-96875994-project-member] Lock "69d8f97f-f58d-4185-95fd-05ed6a6b52d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.158s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.454170] env[67424]: DEBUG nova.compute.manager [None req-2f4aafb5-e940-48fc-bde7-d99e2843bf4b tempest-ServerAddressesTestJSON-2079908818 tempest-ServerAddressesTestJSON-2079908818-project-member] [instance: 33e5f591-2ed7-4302-b7be-8b800cebd5f9] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.489342] env[67424]: DEBUG nova.compute.manager [None req-2f4aafb5-e940-48fc-bde7-d99e2843bf4b tempest-ServerAddressesTestJSON-2079908818 tempest-ServerAddressesTestJSON-2079908818-project-member] [instance: 33e5f591-2ed7-4302-b7be-8b800cebd5f9] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.512316] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2f4aafb5-e940-48fc-bde7-d99e2843bf4b tempest-ServerAddressesTestJSON-2079908818 tempest-ServerAddressesTestJSON-2079908818-project-member] Lock "33e5f591-2ed7-4302-b7be-8b800cebd5f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.120s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.535195] env[67424]: DEBUG nova.compute.manager [None req-2ef0f44b-194c-488d-bd0a-1e5fc7239e9f tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] [instance: 7912b146-0eae-4cf1-a19c-8b2ae94b22f8] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.540783] env[67424]: DEBUG oslo_concurrency.lockutils [None req-554643b1-94de-4596-8f40-1d32c9f60f45 tempest-ServerDiagnosticsNegativeTest-706399655 tempest-ServerDiagnosticsNegativeTest-706399655-project-member] Lock "5689b9d0-a0e8-4461-a0e8-1c9cc32190cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.233s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.558671] env[67424]: DEBUG nova.compute.manager [None req-2ef0f44b-194c-488d-bd0a-1e5fc7239e9f tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] [instance: 7912b146-0eae-4cf1-a19c-8b2ae94b22f8] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.578422] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2ef0f44b-194c-488d-bd0a-1e5fc7239e9f tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] Lock "7912b146-0eae-4cf1-a19c-8b2ae94b22f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.648s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.588862] env[67424]: DEBUG nova.compute.manager [None req-cc9b72f8-ca20-4d35-bbd9-8b5e8ca37198 tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] [instance: 7eb21186-f497-4031-ad88-6b61608b1c3c] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.612970] env[67424]: DEBUG nova.compute.manager [None req-cc9b72f8-ca20-4d35-bbd9-8b5e8ca37198 tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] [instance: 7eb21186-f497-4031-ad88-6b61608b1c3c] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.638773] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cc9b72f8-ca20-4d35-bbd9-8b5e8ca37198 tempest-ListImageFiltersTestJSON-2131471460 tempest-ListImageFiltersTestJSON-2131471460-project-member] Lock "7eb21186-f497-4031-ad88-6b61608b1c3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.012s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.648749] env[67424]: DEBUG nova.compute.manager [None req-bdf01269-36ed-4ce2-94c3-d5efbc525027 tempest-TenantUsagesTestJSON-26869185 tempest-TenantUsagesTestJSON-26869185-project-member] [instance: 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.672531] env[67424]: DEBUG nova.compute.manager [None req-bdf01269-36ed-4ce2-94c3-d5efbc525027 tempest-TenantUsagesTestJSON-26869185 tempest-TenantUsagesTestJSON-26869185-project-member] [instance: 122dfe58-58f3-4d91-b0dd-f3dfd26bfaed] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.694373] env[67424]: DEBUG oslo_concurrency.lockutils [None req-bdf01269-36ed-4ce2-94c3-d5efbc525027 tempest-TenantUsagesTestJSON-26869185 tempest-TenantUsagesTestJSON-26869185-project-member] Lock "122dfe58-58f3-4d91-b0dd-f3dfd26bfaed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.018s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.703577] env[67424]: DEBUG nova.compute.manager [None req-6f7e12f9-98fa-4ee8-b32a-1956c9c0dbe0 tempest-ServersTestManualDisk-2034689218 tempest-ServersTestManualDisk-2034689218-project-member] [instance: cf18eb94-40cd-4451-9cf9-a59679dc2231] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.727351] env[67424]: DEBUG nova.compute.manager [None req-6f7e12f9-98fa-4ee8-b32a-1956c9c0dbe0 tempest-ServersTestManualDisk-2034689218 tempest-ServersTestManualDisk-2034689218-project-member] [instance: cf18eb94-40cd-4451-9cf9-a59679dc2231] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.748711] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f7e12f9-98fa-4ee8-b32a-1956c9c0dbe0 tempest-ServersTestManualDisk-2034689218 tempest-ServersTestManualDisk-2034689218-project-member] Lock "cf18eb94-40cd-4451-9cf9-a59679dc2231" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.888s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.758083] env[67424]: DEBUG nova.compute.manager [None req-6f30b1ea-be87-43b7-b29f-4af6193da475 tempest-ServerDiagnosticsTest-1248428146 tempest-ServerDiagnosticsTest-1248428146-project-member] [instance: ce3ac204-fc43-465d-9d08-b378c9df6275] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.781363] env[67424]: DEBUG nova.compute.manager [None req-6f30b1ea-be87-43b7-b29f-4af6193da475 tempest-ServerDiagnosticsTest-1248428146 tempest-ServerDiagnosticsTest-1248428146-project-member] [instance: ce3ac204-fc43-465d-9d08-b378c9df6275] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.802322] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6f30b1ea-be87-43b7-b29f-4af6193da475 tempest-ServerDiagnosticsTest-1248428146 tempest-ServerDiagnosticsTest-1248428146-project-member] Lock "ce3ac204-fc43-465d-9d08-b378c9df6275" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.338s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.813938] env[67424]: DEBUG nova.compute.manager [None req-d76f902c-29c2-4f20-8a90-bb3eaa825e98 tempest-ServerActionsV293TestJSON-661984579 tempest-ServerActionsV293TestJSON-661984579-project-member] [instance: 4974d062-87b0-4773-a207-34b88bfe8c07] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.841649] env[67424]: DEBUG nova.compute.manager [None req-d76f902c-29c2-4f20-8a90-bb3eaa825e98 tempest-ServerActionsV293TestJSON-661984579 tempest-ServerActionsV293TestJSON-661984579-project-member] [instance: 4974d062-87b0-4773-a207-34b88bfe8c07] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.863100] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d76f902c-29c2-4f20-8a90-bb3eaa825e98 tempest-ServerActionsV293TestJSON-661984579 tempest-ServerActionsV293TestJSON-661984579-project-member] Lock "4974d062-87b0-4773-a207-34b88bfe8c07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.129s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.872485] env[67424]: DEBUG nova.compute.manager [None req-d5615ebf-f73e-4cac-8a5b-070f25129b1d tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] [instance: 4093b8de-13ef-422d-a9ca-f8ae5eb3a18a] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 954.895924] env[67424]: DEBUG nova.compute.manager [None req-d5615ebf-f73e-4cac-8a5b-070f25129b1d tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] [instance: 4093b8de-13ef-422d-a9ca-f8ae5eb3a18a] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 954.918727] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5615ebf-f73e-4cac-8a5b-070f25129b1d tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Lock "4093b8de-13ef-422d-a9ca-f8ae5eb3a18a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.205s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.934190] env[67424]: DEBUG nova.compute.manager [None req-255d51c4-e339-4918-9408-a17bfdf58561 tempest-ImagesNegativeTestJSON-115544103 tempest-ImagesNegativeTestJSON-115544103-project-member] [instance: fb169c4d-3537-4479-a9f3-b56513eea871] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.058416] env[67424]: DEBUG nova.compute.manager [None req-255d51c4-e339-4918-9408-a17bfdf58561 tempest-ImagesNegativeTestJSON-115544103 tempest-ImagesNegativeTestJSON-115544103-project-member] [instance: fb169c4d-3537-4479-a9f3-b56513eea871] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.085663] env[67424]: DEBUG oslo_concurrency.lockutils [None req-255d51c4-e339-4918-9408-a17bfdf58561 tempest-ImagesNegativeTestJSON-115544103 tempest-ImagesNegativeTestJSON-115544103-project-member] Lock "fb169c4d-3537-4479-a9f3-b56513eea871" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.393s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.095799] env[67424]: DEBUG nova.compute.manager [None req-87a03065-283a-4ead-9a34-91b20b347e6f tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] [instance: 825e6698-ee41-41da-888d-2863ea0b1973] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.121781] env[67424]: DEBUG nova.compute.manager [None req-87a03065-283a-4ead-9a34-91b20b347e6f tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] [instance: 825e6698-ee41-41da-888d-2863ea0b1973] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.144050] env[67424]: DEBUG oslo_concurrency.lockutils [None req-87a03065-283a-4ead-9a34-91b20b347e6f tempest-ServerRescueNegativeTestJSON-430444151 tempest-ServerRescueNegativeTestJSON-430444151-project-member] Lock "825e6698-ee41-41da-888d-2863ea0b1973" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.543s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.154963] env[67424]: DEBUG nova.compute.manager [None req-709623f1-2783-4f89-888d-48a473e5d62a tempest-ServersTestFqdnHostnames-1756207499 tempest-ServersTestFqdnHostnames-1756207499-project-member] [instance: 2849f020-1ab9-4756-84a5-3180f06df920] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.181941] env[67424]: DEBUG nova.compute.manager [None req-709623f1-2783-4f89-888d-48a473e5d62a tempest-ServersTestFqdnHostnames-1756207499 tempest-ServersTestFqdnHostnames-1756207499-project-member] [instance: 2849f020-1ab9-4756-84a5-3180f06df920] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.204971] env[67424]: DEBUG oslo_concurrency.lockutils [None req-709623f1-2783-4f89-888d-48a473e5d62a tempest-ServersTestFqdnHostnames-1756207499 tempest-ServersTestFqdnHostnames-1756207499-project-member] Lock "2849f020-1ab9-4756-84a5-3180f06df920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.062s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.215229] env[67424]: DEBUG nova.compute.manager [None req-b1927878-3cba-4f47-b9e8-87fe408679ab tempest-ServerExternalEventsTest-1360939349 tempest-ServerExternalEventsTest-1360939349-project-member] [instance: e15bbca3-2b79-4a30-bfa9-0e7648b98d96] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.240561] env[67424]: DEBUG nova.compute.manager [None req-b1927878-3cba-4f47-b9e8-87fe408679ab tempest-ServerExternalEventsTest-1360939349 tempest-ServerExternalEventsTest-1360939349-project-member] [instance: e15bbca3-2b79-4a30-bfa9-0e7648b98d96] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.267265] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b1927878-3cba-4f47-b9e8-87fe408679ab tempest-ServerExternalEventsTest-1360939349 tempest-ServerExternalEventsTest-1360939349-project-member] Lock "e15bbca3-2b79-4a30-bfa9-0e7648b98d96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.195s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.279815] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.343026] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.343138] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.345314] env[67424]: INFO nova.compute.claims [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.705968] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab9dbfe-20b5-470c-9465-5e118f32deb4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.716555] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a94462c-f2fb-4b29-8d5b-eae2e97263a6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.747658] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01467673-de12-497d-b8ce-625b238b2aaf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.756104] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae6b77a-1a00-4105-834a-c1555579a665 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.770441] env[67424]: DEBUG nova.compute.provider_tree [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
955.780222] env[67424]: DEBUG nova.scheduler.client.report [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 955.800813] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.458s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.801576] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 955.845787] env[67424]: DEBUG nova.compute.utils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.850521] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 955.850700] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 955.861249] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 955.945557] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 955.974175] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 955.974618] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 955.974888] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.975209] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 955.975497] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.975857] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 955.976216] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 955.976491] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 955.976875] env[67424]: DEBUG nova.virt.hardware [None 
req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 955.977197] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 955.977488] env[67424]: DEBUG nova.virt.hardware [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 955.978464] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d02ec5c-2b3d-436c-aec4-7ca52923b079 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.988390] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd5441a-2905-44b9-9db0-ce8e24d71b1a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.019683] env[67424]: DEBUG nova.policy [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce2b272a335849a9bb474e14fc55124d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c7031925a8a46b5bc5beaef1c8ea629', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 956.572395] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Successfully created port: 6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.977065] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Successfully updated port: 6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.989522] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.989801] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 
tempest-ServerActionsTestJSON-850006308-project-member] Acquired lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.990073] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.116303] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.496663] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Updating instance_info_cache with network_info: [{"id": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "address": "fa:16:3e:44:d6:59", "network": {"id": "4f9b4c01-4b93-45e1-85dc-5e2d2d54ae05", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1122111440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c7031925a8a46b5bc5beaef1c8ea629", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c35d6ed-89", "ovs_interfaceid": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.523799] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Releasing lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.525292] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance network_info: |[{"id": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "address": "fa:16:3e:44:d6:59", "network": {"id": "4f9b4c01-4b93-45e1-85dc-5e2d2d54ae05", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1122111440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c7031925a8a46b5bc5beaef1c8ea629", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c35d6ed-89", "ovs_interfaceid": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 958.525428] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:d6:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.533668] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Creating folder: Project (2c7031925a8a46b5bc5beaef1c8ea629). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.533761] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf2035d4-2f54-4f8f-9ccb-26a7e2db8a8d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.547621] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Created folder: Project (2c7031925a8a46b5bc5beaef1c8ea629) in parent group-v639843. [ 958.547952] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Creating folder: Instances. Parent ref: group-v639896. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.548108] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b5d3438-2159-42a8-9cfb-057197e110ae {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.561231] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Created folder: Instances in parent group-v639896. 
[ 958.561231] env[67424]: DEBUG oslo.service.loopingcall [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.561231] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 958.561231] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-751b8b72-3fdf-476a-9b5d-0087617285f0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.583389] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.583389] env[67424]: value = "task-3199971" [ 958.583389] env[67424]: _type = "Task" [ 958.583389] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.602176] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199971, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.618052] env[67424]: DEBUG nova.compute.manager [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Received event network-vif-plugged-6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 958.618579] env[67424]: DEBUG oslo_concurrency.lockutils [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] Acquiring lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.618924] env[67424]: DEBUG oslo_concurrency.lockutils [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.620021] env[67424]: DEBUG oslo_concurrency.lockutils [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.620021] env[67424]: DEBUG nova.compute.manager [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] No waiting events found dispatching network-vif-plugged-6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 958.620021] env[67424]: WARNING nova.compute.manager [req-10c4c2d4-813e-47c6-b092-2ac6a1a5084c req-6fb68993-f1d0-4e94-b1ed-4b36d20f9e09 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Received unexpected 
event network-vif-plugged-6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 for instance with vm_state building and task_state spawning. [ 959.094609] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199971, 'name': CreateVM_Task, 'duration_secs': 0.363502} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.094609] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.095404] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.095612] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.095980] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.096286] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704c94e0-25a2-444f-9fe1-4a4a94da77f9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.101783] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for the task: (returnval){ [ 959.101783] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5214ccea-47c9-437f-8031-249ea70726f3" [ 959.101783] env[67424]: _type = "Task" [ 959.101783] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.115132] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5214ccea-47c9-437f-8031-249ea70726f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.614694] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.614694] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.614694] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.032383] env[67424]: DEBUG nova.compute.manager [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Received event network-changed-6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 961.032624] env[67424]: DEBUG nova.compute.manager [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Refreshing instance network info cache due to event network-changed-6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 961.032833] env[67424]: DEBUG oslo_concurrency.lockutils [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] Acquiring lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.033012] env[67424]: DEBUG oslo_concurrency.lockutils [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] Acquired lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.033564] env[67424]: DEBUG nova.network.neutron [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Refreshing network info cache for port 6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 961.090595] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.090862] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.526967] env[67424]: DEBUG nova.network.neutron [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Updated VIF entry in instance network info cache for port 6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 961.527536] env[67424]: DEBUG nova.network.neutron [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Updating instance_info_cache with network_info: [{"id": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "address": "fa:16:3e:44:d6:59", "network": {"id": "4f9b4c01-4b93-45e1-85dc-5e2d2d54ae05", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1122111440-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c7031925a8a46b5bc5beaef1c8ea629", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c35d6ed-89", "ovs_interfaceid": "6c35d6ed-89f7-4ab3-99d8-8bfcdb0fa773", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.539200] env[67424]: DEBUG oslo_concurrency.lockutils [req-8696c012-b29a-4b5f-816f-e441e6b19630 req-14becad1-cf7a-447a-817f-eed063bc77b1 service nova] Releasing lock "refresh_cache-850df4c3-3a92-47d3-973d-62f41d813f6c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.461532] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ec44372d-2931-49b4-8215-c9b1d2de0f8e tempest-ServersTestBootFromVolume-1003476590 tempest-ServersTestBootFromVolume-1003476590-project-member] Acquiring lock "85b0863a-17ed-4cad-8086-abc6f5755225" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.461826] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ec44372d-2931-49b4-8215-c9b1d2de0f8e tempest-ServersTestBootFromVolume-1003476590 tempest-ServersTestBootFromVolume-1003476590-project-member] Lock "85b0863a-17ed-4cad-8086-abc6f5755225" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.377131] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.377453] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 969.387918] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.388203] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.114996] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "850df4c3-3a92-47d3-973d-62f41d813f6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.383081] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.387610] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.383089] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.407799] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.408024] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.388068] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.400699] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.400917] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.401099] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.401270] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 972.403154] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8557f69f-fe4a-4f6b-a53d-501042470ddb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.412827] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c0be28-28e5-4e8f-aa5d-17827ab64b4b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.428513] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd31841-dec1-48c3-b35e-2a61b3cdaea2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.435426] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cc2c5f-d60d-4b44-bb3d-3f7e2ec4751f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.464787] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 972.464981] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.465203] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.542508] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.542705] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8556fc83-206e-4e50-bd54-4185132497a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.542865] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.543025] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.543189] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.543333] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.543455] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.543603] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.544309] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.544696] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.556857] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a78a13fd-6763-427e-af14-a20bee858505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.568820] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.579094] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.590687] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.600955] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72480e41-88d7-4986-86fd-7d98aa82196b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.611074] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 54dc90d0-4f6a-4b16-b1af-dc8c74aef382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.620843] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3ede76b6-042e-496f-aea7-a1c42166827f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.630633] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 614a4c23-3aee-4dd4-9ca7-534584122c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.640295] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance c0b7de50-17ad-4f8e-9887-345ca08e9d33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.649597] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 38255062-f950-4b14-90e1-96a30ce2843e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.659088] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1e5007de-23df-4c65-9210-f460ed564216 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.670124] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cfef8a4c-abc4-4003-a932-e2f823c84e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.683250] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.694440] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 85b0863a-17ed-4cad-8086-abc6f5755225 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 972.694715] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 972.694818] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 972.983020] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f5adf5-fcee-4f07-b1ae-f5af0b34fdd7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.990945] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a410fd3a-da52-47a2-aaa9-8aa0607e4f16 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.021179] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46b6a3a-9c5f-41b4-9502-ace7b17d11db {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.029092] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7989a6ec-9157-49ca-a1bd-9266969b1219 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.042533] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.053089] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 973.076845] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 973.077046] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.612s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.077528] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.077962] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 974.077962] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 974.100034] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100034] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100034] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100034] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100034] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100267] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100267] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100267] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100267] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100267] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 974.100420] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 983.562328] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9d4e6525-2952-4a92-b1ed-b925cdbffeba tempest-ServersNegativeTestMultiTenantJSON-500119815 tempest-ServersNegativeTestMultiTenantJSON-500119815-project-member] Acquiring lock "8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.562639] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9d4e6525-2952-4a92-b1ed-b925cdbffeba tempest-ServersNegativeTestMultiTenantJSON-500119815 tempest-ServersNegativeTestMultiTenantJSON-500119815-project-member] Lock "8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.685364] env[67424]: WARNING oslo_vmware.rw_handles [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote 
end closed connection without" [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 998.685364] env[67424]: ERROR oslo_vmware.rw_handles [ 998.686174] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 998.687740] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 998.688199] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Copying Virtual Disk [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/7ae783f9-79f6-4760-829e-b92df8b828f0/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 998.688553] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-493f7bad-868f-49eb-be0e-0c28a4eaa1ed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.697909] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for the task: (returnval){ [ 998.697909] env[67424]: value = "task-3199974" [ 998.697909] env[67424]: _type = "Task" [ 998.697909] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.708974] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Task: {'id': task-3199974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.209740] env[67424]: DEBUG oslo_vmware.exceptions [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 999.209978] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.210561] env[67424]: ERROR nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 999.210561] env[67424]: Faults: ['InvalidArgument'] [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Traceback (most recent call last): [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] yield resources [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self.driver.spawn(context, instance, image_meta, [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self._fetch_image_if_missing(context, vi) [ 999.210561] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] image_cache(vi, tmp_image_ds_loc) [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] vm_util.copy_virtual_disk( [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] session._wait_for_task(vmdk_copy_task) [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return self.wait_for_task(task_ref) [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return evt.wait() [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] result = hub.switch() [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 999.211131] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return self.greenlet.switch() [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self.f(*self.args, **self.kw) [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] raise exceptions.translate_fault(task_info.error) [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Faults: ['InvalidArgument'] [ 999.211509] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] [ 999.211509] env[67424]: INFO nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Terminating instance [ 999.212474] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.212694] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.212927] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b771b4b4-f8a6-47db-8f6a-5dcc10ab3536 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.215213] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 999.215422] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 999.216160] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62891454-fc9f-4556-940f-a5d5a52583ba {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.222953] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 999.223191] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9631c1ee-2ac1-497d-91cb-7b65fa77c877 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.225452] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.225634] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 999.226489] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a709e692-5316-4ed6-b1f5-c70b6d1eb98b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.232209] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 999.232209] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]527ba79d-c7a1-82ac-e8cb-b8e39abdb553" [ 999.232209] env[67424]: _type = "Task" [ 999.232209] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.239410] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]527ba79d-c7a1-82ac-e8cb-b8e39abdb553, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.293917] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 999.294161] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 999.294415] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Deleting the datastore file [datastore2] 8556fc83-206e-4e50-bd54-4185132497a7 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 999.294834] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-507348aa-2183-4158-be9d-725c783f9793 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.301588] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for the task: (returnval){ [ 999.301588] env[67424]: value = "task-3199976" [ 999.301588] env[67424]: _type = "Task" [ 999.301588] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.309166] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Task: {'id': task-3199976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.743534] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 999.743858] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.744046] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83a09f82-3aba-495b-8878-da4115e8ea9b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.755427] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.755427] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Fetch image to [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 999.755616] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 999.756469] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbe00e8-3212-42ff-a798-deeb87ebbbe1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.762706] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8757bf3d-784e-40c4-b623-fd4cdc781c8f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.771397] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632648c9-cc86-49f1-a0e1-36059d9ed5dc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.807603] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d9790eb8-d09b-46fc-89be-94f0dc6a0424 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.814621] env[67424]: DEBUG oslo_vmware.api [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Task: {'id': task-3199976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074648} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.816197] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.816396] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 999.816574] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 999.816743] env[67424]: INFO nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 999.818549] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bd2fc78-4f69-46d1-aa2c-aa77cd81bc6d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.820531] env[67424]: DEBUG nova.compute.claims [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 999.820708] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.820918] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.847611] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 999.900750] env[67424]: DEBUG oslo_vmware.rw_handles [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 999.965124] env[67424]: DEBUG oslo_vmware.rw_handles [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 999.965412] env[67424]: DEBUG oslo_vmware.rw_handles [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1000.210137] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c54c321-bd61-4346-a2b0-e37034ad6b3b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.217205] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ccc521-ca0f-4da5-9aaa-7bcf6a973951 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.246173] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fffed6-0bf0-4398-b8c0-8713a688ea49 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.253421] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b7cbd9-1dca-4849-a8aa-0341c3a9c0fb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.267068] env[67424]: DEBUG nova.compute.provider_tree [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.276497] env[67424]: DEBUG nova.scheduler.client.report [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1000.307441] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.486s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.308050] env[67424]: ERROR nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1000.308050] env[67424]: Faults: ['InvalidArgument'] [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Traceback (most recent call last): [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1000.308050] env[67424]: ERROR nova.compute.manager 
[instance: 8556fc83-206e-4e50-bd54-4185132497a7] self.driver.spawn(context, instance, image_meta, [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self._fetch_image_if_missing(context, vi) [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] image_cache(vi, tmp_image_ds_loc) [ 1000.308050] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] vm_util.copy_virtual_disk( [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] session._wait_for_task(vmdk_copy_task) [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return self.wait_for_task(task_ref) [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return evt.wait() [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] result = hub.switch() [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] return self.greenlet.switch() [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1000.308480] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] self.f(*self.args, **self.kw) [ 1000.308924] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1000.308924] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] raise exceptions.translate_fault(task_info.error) [ 1000.308924] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1000.308924] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Faults: ['InvalidArgument'] [ 1000.308924] env[67424]: ERROR nova.compute.manager [instance: 8556fc83-206e-4e50-bd54-4185132497a7] [ 1000.308924] env[67424]: DEBUG nova.compute.utils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1000.310392] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Build of instance 8556fc83-206e-4e50-bd54-4185132497a7 was re-scheduled: A specified parameter was not correct: fileType [ 1000.310392] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1000.310795] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1000.310997] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1000.311197] env[67424]: DEBUG nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1000.311376] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1000.973552] env[67424]: DEBUG nova.network.neutron [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.986365] env[67424]: INFO nova.compute.manager [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Took 0.67 seconds to deallocate network for instance. [ 1001.099422] env[67424]: INFO nova.scheduler.client.report [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Deleted allocations for instance 8556fc83-206e-4e50-bd54-4185132497a7 [ 1001.126335] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8ec8ca52-56e2-4cdb-8e7f-1cde3c9f3a77 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 341.208s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.127559] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 142.579s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.127780] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "8556fc83-206e-4e50-bd54-4185132497a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.127978] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.128160] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.130179] env[67424]: INFO nova.compute.manager [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Terminating instance [ 1001.131829] env[67424]: DEBUG nova.compute.manager [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1001.132078] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1001.132862] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-086de8d5-fd82-4df4-a7e7-32169de2cfe9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.142011] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a1dac9-1893-400b-b9f6-e10e3b6ba3d6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.153457] env[67424]: DEBUG nova.compute.manager [None req-597e61db-a85e-4e68-b5ce-ce601f54cbff tempest-ServersListShow296Test-1208499162 tempest-ServersListShow296Test-1208499162-project-member] [instance: a78a13fd-6763-427e-af14-a20bee858505] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1001.175956] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8556fc83-206e-4e50-bd54-4185132497a7 could not be found. [ 1001.175956] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1001.175956] env[67424]: INFO nova.compute.manager [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1001.176144] env[67424]: DEBUG oslo.service.loopingcall [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.176354] env[67424]: DEBUG nova.compute.manager [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1001.176469] env[67424]: DEBUG nova.network.neutron [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1001.180715] env[67424]: DEBUG nova.compute.manager [None req-597e61db-a85e-4e68-b5ce-ce601f54cbff tempest-ServersListShow296Test-1208499162 tempest-ServersListShow296Test-1208499162-project-member] [instance: a78a13fd-6763-427e-af14-a20bee858505] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1001.202419] env[67424]: DEBUG oslo_concurrency.lockutils [None req-597e61db-a85e-4e68-b5ce-ce601f54cbff tempest-ServersListShow296Test-1208499162 tempest-ServersListShow296Test-1208499162-project-member] Lock "a78a13fd-6763-427e-af14-a20bee858505" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 222.106s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.202419] env[67424]: DEBUG nova.network.neutron [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.210132] env[67424]: INFO nova.compute.manager [-] [instance: 8556fc83-206e-4e50-bd54-4185132497a7] Took 0.03 seconds to deallocate network for instance. [ 1001.215358] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1001.268582] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.269418] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.270988] env[67424]: INFO nova.compute.claims [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.334946] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c29946b0-127d-4425-8493-ff86e13d3216 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "8556fc83-206e-4e50-bd54-4185132497a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.206s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.609465] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef3a839-18ea-4c90-8610-980470d9b1a1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.618949] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae771e3-ceab-4db2-a41a-dc2920358ea8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.648634] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7c87cd-5671-4f83-9ce4-45c05f485e4e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.655668] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1e7c27-8797-43ca-9f01-212bf5c85d75 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.668492] env[67424]: DEBUG nova.compute.provider_tree [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.679773] env[67424]: DEBUG nova.scheduler.client.report [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1001.692690] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.693190] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1001.726415] env[67424]: DEBUG nova.compute.utils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.727994] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1001.728181] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1001.736367] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1001.798292] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1001.834372] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1001.834629] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1001.834786] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.834971] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1001.835133] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.835293] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1001.835525] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1001.835708] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1001.835885] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df 
tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1001.836086] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1001.836268] env[67424]: DEBUG nova.virt.hardware [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.837385] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a51ad40-bf8d-4dc8-95cb-567227e8bf6e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.845293] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f4c986-3541-4367-ad55-7ba0b16cd7ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.850483] env[67424]: DEBUG nova.policy [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66fa16dd270942dba4b94fa0309729ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '689a0b14ff0b42ad99141803bcb23266', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1002.383082] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Successfully created port: 06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.083870] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e2010fdd-9f07-4e36-9942-2add29834856 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Acquiring lock "a0c7d0b0-e26c-4cba-9a45-8e60486e3905" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.083870] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e2010fdd-9f07-4e36-9942-2add29834856 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "a0c7d0b0-e26c-4cba-9a45-8e60486e3905" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.694778] env[67424]: DEBUG 
nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Successfully updated port: 06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.707014] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.707180] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.707329] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1003.770983] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1003.967806] env[67424]: DEBUG nova.compute.manager [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Received event network-vif-plugged-06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1003.968036] env[67424]: DEBUG oslo_concurrency.lockutils [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] Acquiring lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.968248] env[67424]: DEBUG oslo_concurrency.lockutils [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.968421] env[67424]: DEBUG oslo_concurrency.lockutils [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.968580] env[67424]: DEBUG nova.compute.manager [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] 
[instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] No waiting events found dispatching network-vif-plugged-06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.968740] env[67424]: WARNING nova.compute.manager [req-05586718-a332-4c01-b02c-d1fbae8d554b req-d8233009-dd44-48be-bc7f-94f38cbb3c80 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Received unexpected event network-vif-plugged-06066b53-387b-49a9-80f3-d0cb3c4abf4c for instance with vm_state building and task_state spawning. [ 1004.107816] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Updating instance_info_cache with network_info: [{"id": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "address": "fa:16:3e:83:26:6f", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06066b53-38", "ovs_interfaceid": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.121392] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.121712] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance network_info: |[{"id": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "address": "fa:16:3e:83:26:6f", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06066b53-38", "ovs_interfaceid": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1004.122128] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:26:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06066b53-387b-49a9-80f3-d0cb3c4abf4c', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.130050] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating folder: Project (689a0b14ff0b42ad99141803bcb23266). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1004.130502] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07bbc22a-f27b-4b68-a7a3-ca5bc879cad6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.140775] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created folder: Project (689a0b14ff0b42ad99141803bcb23266) in parent group-v639843. [ 1004.140968] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating folder: Instances. Parent ref: group-v639899. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1004.141180] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e1e8c24-00ba-4358-9678-1a7798790ce5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.149487] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created folder: Instances in parent group-v639899. [ 1004.149714] env[67424]: DEBUG oslo.service.loopingcall [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.149890] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1004.150094] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9ace7a9-664e-46f6-b8ae-400b692dec54 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.168743] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.168743] env[67424]: value = "task-3199979" [ 1004.168743] env[67424]: _type = "Task" [ 1004.168743] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.176051] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199979, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.679032] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199979, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.179862] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199979, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.681656] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199979, 'name': CreateVM_Task, 'duration_secs': 1.342703} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.681830] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1005.682473] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.682649] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.682973] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1005.683241] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196bcd70-7cad-4ce4-85aa-abccc1145b63 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.687628] env[67424]: DEBUG 
oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1005.687628] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]525a38fb-0638-6a52-2c46-e2bc12419ff7" [ 1005.687628] env[67424]: _type = "Task" [ 1005.687628] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.695194] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]525a38fb-0638-6a52-2c46-e2bc12419ff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.060941] env[67424]: DEBUG nova.compute.manager [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Received event network-changed-06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1006.061160] env[67424]: DEBUG nova.compute.manager [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Refreshing instance network info cache due to event network-changed-06066b53-387b-49a9-80f3-d0cb3c4abf4c. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1006.061379] env[67424]: DEBUG oslo_concurrency.lockutils [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] Acquiring lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.061523] env[67424]: DEBUG oslo_concurrency.lockutils [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] Acquired lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.061682] env[67424]: DEBUG nova.network.neutron [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Refreshing network info cache for port 06066b53-387b-49a9-80f3-d0cb3c4abf4c {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1006.198951] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.199273] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.199516] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.555265] env[67424]: DEBUG nova.network.neutron [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Updated VIF entry in instance network info cache for port 06066b53-387b-49a9-80f3-d0cb3c4abf4c. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1006.556032] env[67424]: DEBUG nova.network.neutron [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Updating instance_info_cache with network_info: [{"id": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "address": "fa:16:3e:83:26:6f", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06066b53-38", "ovs_interfaceid": "06066b53-387b-49a9-80f3-d0cb3c4abf4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.565100] env[67424]: DEBUG oslo_concurrency.lockutils [req-429bbe0a-4e53-4179-9f9c-261cc576b417 req-b5e7ff11-3d70-45f2-95c4-576108eb8f83 service nova] Releasing lock "refresh_cache-2489aa3d-1973-4ede-9cae-dab971fa4a7c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.388797] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.389062] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1030.388397] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.387124] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.383594] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.387317] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.387554] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.387751] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.387953] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.388198] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1034.388279] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1034.410047] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410200] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410311] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410438] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410566] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410688] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410809] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.410929] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.411062] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.411185] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1034.411306] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1034.411817] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.422419] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.422637] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.422864] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.422989] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1034.424073] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af833dda-d061-42fb-aba6-3d1923978608 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.433115] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bece4b99-5855-4409-8e70-e4bcc2326d2e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.446904] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904badf0-a62d-4318-9dd1-c668191c5ee7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.454025] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fc13c5-c576-426c-91e8-4ece929707d2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.483366] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1034.483601] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1034.483775] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.555478] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.555639] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.555768] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.555892] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556096] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556223] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556338] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556451] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556563] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.556675] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.568320] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.578454] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.588929] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72480e41-88d7-4986-86fd-7d98aa82196b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.598785] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 54dc90d0-4f6a-4b16-b1af-dc8c74aef382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.608704] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3ede76b6-042e-496f-aea7-a1c42166827f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.618347] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 614a4c23-3aee-4dd4-9ca7-534584122c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.627850] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance c0b7de50-17ad-4f8e-9887-345ca08e9d33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.637014] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 38255062-f950-4b14-90e1-96a30ce2843e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.646318] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1e5007de-23df-4c65-9210-f460ed564216 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.656076] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cfef8a4c-abc4-4003-a932-e2f823c84e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.665611] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.676506] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 85b0863a-17ed-4cad-8086-abc6f5755225 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.686920] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.700111] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a0c7d0b0-e26c-4cba-9a45-8e60486e3905 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.700354] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1034.700500] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1034.976588] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1794b44-0b94-4a85-90bd-f0b31a042a7f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.983645] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e364a3bf-669d-4b3e-89cf-7b990df9c2e1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.013186] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3c0fbc-fb93-4e4f-bc78-a22a70e1ad90 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.020285] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b1199e-4b82-4d8e-b569-2304f40fac05 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.033066] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.041669] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1035.054750] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1035.055044] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.571s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.702890] env[67424]: WARNING oslo_vmware.rw_handles [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1048.702890] env[67424]: ERROR oslo_vmware.rw_handles [ 1048.703434] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1048.705569] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1048.705812] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] Copying Virtual Disk [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/dcfb037a-3c8b-4855-bc24-fef4bda9a226/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1048.706108] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d0b2441-bf64-44b4-a329-9fa04f253c3f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.714914] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 1048.714914] env[67424]: value = "task-3199980" [ 1048.714914] env[67424]: _type = "Task" [ 1048.714914] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.723097] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.224905] env[67424]: DEBUG oslo_vmware.exceptions [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1049.225250] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.225819] env[67424]: ERROR nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.225819] env[67424]: Faults: ['InvalidArgument'] [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Traceback (most recent call last): [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] yield resources [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self.driver.spawn(context, instance, image_meta, [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self._fetch_image_if_missing(context, vi) [ 1049.225819] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] image_cache(vi, tmp_image_ds_loc) [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] vm_util.copy_virtual_disk( [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] session._wait_for_task(vmdk_copy_task) [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return self.wait_for_task(task_ref) [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return evt.wait() [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] result = hub.switch() [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1049.226144] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return self.greenlet.switch() [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self.f(*self.args, **self.kw) [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] raise exceptions.translate_fault(task_info.error) [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Faults: ['InvalidArgument'] [ 1049.226495] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] [ 1049.226495] env[67424]: INFO nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Terminating instance [ 1049.228027] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.228027] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.228210] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-21d361e1-106f-46e0-b92c-144a5572bd23 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.230711] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1049.230909] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1049.231657] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654329c5-85ea-4d34-baa6-a1095d451e94 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.239197] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1049.239197] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01333286-a576-407a-b5a0-c135a6a145c4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.241204] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.241387] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1049.242347] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-304df5c5-b8a5-4ceb-a4e4-fd025606895f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.247779] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 1049.247779] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52479766-f854-f5de-9835-677fa8a00fb1" [ 1049.247779] env[67424]: _type = "Task" [ 1049.247779] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.259022] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52479766-f854-f5de-9835-677fa8a00fb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.303611] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1049.303842] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1049.304028] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleting the datastore file [datastore2] cf9e3c04-b1be-41a3-b408-de1f48fa96c6 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.304309] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00c396fd-1c82-4217-8961-e1b84d81ac06 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.310289] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 1049.310289] env[67424]: value = "task-3199982" [ 1049.310289] env[67424]: _type = "Task" [ 1049.310289] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.318585] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199982, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.758528] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1049.758946] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating directory with path [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.759021] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22e6f80d-03e9-4cce-a396-8dff2bccd0db {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.770248] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Created directory with path [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.770461] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Fetch image to [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1049.770632] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1049.771421] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4cf9c8-b7f0-4434-b9e2-bdb6ce69c202 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.778969] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecce20e-e2d0-4509-afae-371db0d7d90d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.787873] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c044e99e-0fbc-400b-b0f2-1f91e294a4f1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.820488] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c1638365-bf17-4eba-9a94-4e57b9fbc571 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.827283] env[67424]: DEBUG oslo_vmware.api [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076024} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.828756] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.828950] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1049.829138] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1049.829317] env[67424]: INFO nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1049.831782] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cce8fdb7-574a-40c6-a7d5-1a03521f3c30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.834355] env[67424]: DEBUG nova.compute.claims [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1049.834537] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.834749] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.918150] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1049.981882] env[67424]: DEBUG oslo_vmware.rw_handles [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1050.045746] env[67424]: DEBUG oslo_vmware.rw_handles [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1050.045937] env[67424]: DEBUG oslo_vmware.rw_handles [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1050.213692] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0d6805-b4d5-4e4f-9043-9b18d61ba48c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.221333] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79999280-8a9e-491d-a80d-0c8edc852021 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.252294] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0177508-3474-44d3-99dd-665af652ed99 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.259357] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f172a426-e1e5-4cd2-b651-dcb305d2b7b3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.272467] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.298175] env[67424]: ERROR nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [req-cbac61ba-a60b-4b58-bd53-fe58b0e21d74] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b21acede-6243-4c82-934a-a3956380220f. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cbac61ba-a60b-4b58-bd53-fe58b0e21d74"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1050.315060] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1050.330861] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1050.331167] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.342948] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1050.361021] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1050.615521] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d4d245-e071-4b81-982f-448cc3af2526 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.623073] env[67424]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01544032-40a6-4b3f-aa5e-42665e064e89 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.652449] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e8e552-e4b9-45e7-bccf-ad6f694c471a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.659234] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dc9004-6fc5-4494-be43-4c21ceb529a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.672975] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.709983] env[67424]: DEBUG nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updated inventory for provider b21acede-6243-4c82-934a-a3956380220f with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 1050.710403] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating resource provider b21acede-6243-4c82-934a-a3956380220f generation from 61 to 62 during operation: update_inventory {{(pid=67424) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1050.710478] env[67424]: DEBUG nova.compute.provider_tree [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1050.729689] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.895s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.730251] env[67424]: ERROR nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1050.730251] env[67424]: Faults: ['InvalidArgument'] [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Traceback (most recent call last): [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self.driver.spawn(context, instance, image_meta, [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self._fetch_image_if_missing(context, vi) [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] image_cache(vi, tmp_image_ds_loc) [ 1050.730251] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] vm_util.copy_virtual_disk( [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] session._wait_for_task(vmdk_copy_task) [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return self.wait_for_task(task_ref) [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: 
cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return evt.wait() [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] result = hub.switch() [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] return self.greenlet.switch() [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1050.730882] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] self.f(*self.args, **self.kw) [ 1050.731170] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1050.731170] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] raise exceptions.translate_fault(task_info.error) [ 1050.731170] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1050.731170] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Faults: ['InvalidArgument'] [ 1050.731170] env[67424]: ERROR nova.compute.manager [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] [ 1050.731170] env[67424]: DEBUG nova.compute.utils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1050.732796] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Build of instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 was re-scheduled: A specified parameter was not correct: fileType [ 1050.732796] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1050.733200] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1050.733388] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1050.733561] env[67424]: DEBUG nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1050.733725] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1051.284823] env[67424]: DEBUG nova.network.neutron [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.301660] env[67424]: INFO nova.compute.manager [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: cf9e3c04-b1be-41a3-b408-de1f48fa96c6] Took 0.57 seconds to deallocate network for instance. [ 1051.404537] env[67424]: INFO nova.scheduler.client.report [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted allocations for instance cf9e3c04-b1be-41a3-b408-de1f48fa96c6 [ 1051.430820] env[67424]: DEBUG oslo_concurrency.lockutils [None req-81a78d1d-a20a-4cb2-a3a3-e4776112ec9e tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "cf9e3c04-b1be-41a3-b408-de1f48fa96c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 392.112s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.443349] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1051.491616] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.491854] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.493315] env[67424]: INFO nova.compute.claims [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.814838] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b232a2d-5e4b-4b6b-aaab-f2bb6945b828 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.823052] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a2ec7e-5e99-400a-a74d-48be2e0c3871 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.853198] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f98bbb5-6c9d-4523-b97b-ed398f0adf41 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.860363] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffabab5-d979-4387-9adc-6a6c948c0cf9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.873416] env[67424]: DEBUG nova.compute.provider_tree [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.882326] env[67424]: DEBUG nova.scheduler.client.report [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1051.897175] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.897659] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1051.932950] env[67424]: DEBUG nova.compute.utils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1051.938024] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1051.938024] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1051.944829] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1052.018029] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1052.046122] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1052.046415] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1052.046592] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.046776] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1052.046921] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.047078] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1052.047290] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1052.047452] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1052.047613] env[67424]: DEBUG nova.virt.hardware [None 
req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1052.047775] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1052.047947] env[67424]: DEBUG nova.virt.hardware [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1052.048845] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8455ce1e-97af-4e05-807e-6d57ff2d9ffe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.056996] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8088529-6456-44c3-9702-e764d0a4a67f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.138189] env[67424]: DEBUG nova.policy [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91493530b7524006b0dc2ae6b2dce27d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b34765cfc4cb42b8ab2ac6c233672f80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1052.791815] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Successfully created port: 71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1053.874122] env[67424]: DEBUG nova.compute.manager [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Received event network-vif-plugged-71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1053.875096] env[67424]: DEBUG oslo_concurrency.lockutils [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] Acquiring lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.875096] env[67424]: DEBUG oslo_concurrency.lockutils [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] 
Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.875505] env[67424]: DEBUG oslo_concurrency.lockutils [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.876040] env[67424]: DEBUG nova.compute.manager [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] No waiting events found dispatching network-vif-plugged-71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1053.876040] env[67424]: WARNING nova.compute.manager [req-9d3a2b2f-95d2-45d2-977f-410505840484 req-883d6e2c-07af-4740-8d2f-84732c1f48ae service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Received unexpected event network-vif-plugged-71a89fe1-60b8-44c8-8ec1-077aab090a1a for instance with vm_state building and task_state spawning. [ 1053.928266] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Successfully updated port: 71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1053.938454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.938454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquired lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.938454] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1054.032619] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1054.501566] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Updating instance_info_cache with network_info: [{"id": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "address": "fa:16:3e:55:95:c2", "network": {"id": "68a7c306-485a-4a95-8d46-8824fe33151c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1328787388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34765cfc4cb42b8ab2ac6c233672f80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a89fe1-60", "ovs_interfaceid": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.516794] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Releasing lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.517124] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance network_info: |[{"id": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "address": "fa:16:3e:55:95:c2", "network": {"id": "68a7c306-485a-4a95-8d46-8824fe33151c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1328787388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34765cfc4cb42b8ab2ac6c233672f80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a89fe1-60", "ovs_interfaceid": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1054.517585] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:95:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cebc48c-6a77-46bf-9c12-ac130e4d7d76', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71a89fe1-60b8-44c8-8ec1-077aab090a1a', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.525499] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Creating folder: Project (b34765cfc4cb42b8ab2ac6c233672f80). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1054.526236] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ebdbf30-9f6a-4602-b3c1-24e17ad688bd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.538790] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Created folder: Project (b34765cfc4cb42b8ab2ac6c233672f80) in parent group-v639843. [ 1054.539329] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Creating folder: Instances. Parent ref: group-v639902. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1054.539329] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e00b4a7a-4dca-4ed9-951f-796a54783c39 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.549789] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Created folder: Instances in parent group-v639902. [ 1054.549789] env[67424]: DEBUG oslo.service.loopingcall [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.549789] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1054.549789] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f78dbdca-64e7-4136-9437-f9bca67de657 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.571917] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.571917] env[67424]: value = "task-3199985" [ 1054.571917] env[67424]: _type = "Task" [ 1054.571917] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.580011] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199985, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.084326] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199985, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.583507] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199985, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.906714] env[67424]: DEBUG nova.compute.manager [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Received event network-changed-71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1055.907746] env[67424]: DEBUG nova.compute.manager [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Refreshing instance network info cache due to event network-changed-71a89fe1-60b8-44c8-8ec1-077aab090a1a. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1055.907746] env[67424]: DEBUG oslo_concurrency.lockutils [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] Acquiring lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.907746] env[67424]: DEBUG oslo_concurrency.lockutils [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] Acquired lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.907746] env[67424]: DEBUG nova.network.neutron [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Refreshing network info cache for port 71a89fe1-60b8-44c8-8ec1-077aab090a1a {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1056.083013] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199985, 'name': CreateVM_Task, 'duration_secs': 1.381175} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.083315] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1056.083968] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.084152] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.084467] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1056.084709] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-932004e7-d30f-474c-86f7-5d37c7070d73 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.089115] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for the task: (returnval){ [ 1056.089115] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522acb8f-5285-2117-dcb3-3a56c668df61" [ 1056.089115] env[67424]: _type = "Task" [ 1056.089115] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.096378] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522acb8f-5285-2117-dcb3-3a56c668df61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.396751] env[67424]: DEBUG nova.network.neutron [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Updated VIF entry in instance network info cache for port 71a89fe1-60b8-44c8-8ec1-077aab090a1a. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1056.397129] env[67424]: DEBUG nova.network.neutron [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Updating instance_info_cache with network_info: [{"id": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "address": "fa:16:3e:55:95:c2", "network": {"id": "68a7c306-485a-4a95-8d46-8824fe33151c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1328787388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b34765cfc4cb42b8ab2ac6c233672f80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cebc48c-6a77-46bf-9c12-ac130e4d7d76", "external-id": "nsx-vlan-transportzone-382", "segmentation_id": 382, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a89fe1-60", "ovs_interfaceid": "71a89fe1-60b8-44c8-8ec1-077aab090a1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.406646] env[67424]: DEBUG oslo_concurrency.lockutils [req-6b9a7e64-2ca0-42bd-9acb-0f78124315ab req-27a2b2da-f306-485d-a223-5336ab325ff7 service nova] Releasing lock "refresh_cache-3e4e39f2-9267-4076-a302-d5210cb3d5ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.600305] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.600568] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1056.600828] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.436915] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.436915] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.353418] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "15544bb1-1353-4b19-ac1e-967f2e43713e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.353418] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.382508] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "4b3afdb2-16fd-453c-b831-7ad5a0a74772" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.382881] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "4b3afdb2-16fd-453c-b831-7ad5a0a74772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.545704] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Acquiring lock "0e1ff97a-fcea-4a95-a9fb-d35797c914fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.546049] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "0e1ff97a-fcea-4a95-a9fb-d35797c914fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.576401] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Acquiring lock "2580169a-b80b-43fe-bd63-9a09723a691e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.576690] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "2580169a-b80b-43fe-bd63-9a09723a691e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.607223] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Acquiring lock "5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.607552] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.661099] env[67424]: DEBUG oslo_concurrency.lockutils [None req-22094edd-9a40-4511-ac5e-cadf8f48d499 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "3525b872-dfbb-44d6-853d-8d0612cec3f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.661414] env[67424]: DEBUG oslo_concurrency.lockutils [None req-22094edd-9a40-4511-ac5e-cadf8f48d499 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "3525b872-dfbb-44d6-853d-8d0612cec3f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.033025] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.033025] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1090.388733] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.383900] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.957048] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d91e02eb-096a-40cf-8049-25b24e55c021 tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "2ad24d5c-afc8-435e-a9b7-3b25a7ffd587" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.957048] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d91e02eb-096a-40cf-8049-25b24e55c021 tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "2ad24d5c-afc8-435e-a9b7-3b25a7ffd587" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.382933] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.410691] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.411027] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.411240] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.411412] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.387425] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.399105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.399353] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.399520] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.399698] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1094.400772] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65abb0a2-1af1-4e68-b7cb-b4455120c9a1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.410873] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a31abe-c136-457b-8336-312b68b8d981 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.426293] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee85145-61f9-40a9-870e-11c098dd390c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.432651] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447fea5b-b2dd-4710-bdf3-0cdc697554f9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.461209] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181000MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1094.461381] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.461558] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.533050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 
bb35e63a-3fb6-4a2a-8037-3fcc16def092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533402] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.533436] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.544309] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.553490] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72480e41-88d7-4986-86fd-7d98aa82196b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.562581] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 54dc90d0-4f6a-4b16-b1af-dc8c74aef382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.572245] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3ede76b6-042e-496f-aea7-a1c42166827f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.581237] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 614a4c23-3aee-4dd4-9ca7-534584122c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.590068] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance c0b7de50-17ad-4f8e-9887-345ca08e9d33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.599833] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 38255062-f950-4b14-90e1-96a30ce2843e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.609461] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1e5007de-23df-4c65-9210-f460ed564216 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.620261] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cfef8a4c-abc4-4003-a932-e2f823c84e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.630041] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.639301] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 85b0863a-17ed-4cad-8086-abc6f5755225 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.649901] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.659741] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a0c7d0b0-e26c-4cba-9a45-8e60486e3905 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.669643] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.679147] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.688902] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4b3afdb2-16fd-453c-b831-7ad5a0a74772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.698793] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0e1ff97a-fcea-4a95-a9fb-d35797c914fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.708196] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2580169a-b80b-43fe-bd63-9a09723a691e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.718640] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.728310] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3525b872-dfbb-44d6-853d-8d0612cec3f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.737862] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1094.738113] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.738282] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1095.079469] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b592467a-eba5-44e5-85d7-ccb9f0ea9a82 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.087406] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f59c58-1c47-4068-b0de-ab72bb8da53c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.116680] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9534dc2-ea0f-479d-8c3d-bda8f4fee01f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.123548] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8246186-1063-41a8-a394-a71702912906 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.136282] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.144374] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1095.158194] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1095.158383] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.697s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.159013] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.159394] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1097.159394] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1097.185111] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.185311] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.185443] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.185566] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.185686] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.185801] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.186047] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.186254] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.186391] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.186514] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1097.186635] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1097.451023] env[67424]: WARNING oslo_vmware.rw_handles [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1097.451023] env[67424]: ERROR oslo_vmware.rw_handles [ 1097.451493] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1097.453646] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1097.456141] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Copying Virtual Disk [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/e9ed7c4f-6801-48b7-9e45-9d7d6de79844/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1097.456499] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e48f950-061e-4f45-bc25-2470dad4a7f8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.464526] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 1097.464526] env[67424]: value = "task-3199986" [ 1097.464526] env[67424]: _type = "Task" [ 1097.464526] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.472270] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.975335] env[67424]: DEBUG oslo_vmware.exceptions [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1097.975998] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.976526] env[67424]: ERROR nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1097.976526] env[67424]: Faults: ['InvalidArgument'] [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Traceback (most recent call last): [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] yield resources [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self.driver.spawn(context, instance, image_meta, [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self._fetch_image_if_missing(context, vi) [ 1097.976526] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] image_cache(vi, tmp_image_ds_loc) [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] vm_util.copy_virtual_disk( [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] session._wait_for_task(vmdk_copy_task) [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return self.wait_for_task(task_ref) [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return evt.wait() [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] result = hub.switch() [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1097.977109] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return self.greenlet.switch() [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self.f(*self.args, **self.kw) [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] raise exceptions.translate_fault(task_info.error) [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Faults: ['InvalidArgument'] [ 1097.977508] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] [ 1097.977508] env[67424]: INFO nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Terminating instance [ 1097.978962] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.979179] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1097.979903] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 
tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1097.980163] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1097.980360] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf7f0134-ebe9-4038-8e57-aaf79e10c606 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.983699] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd43452-c6dc-4822-9d33-c748ea2127b6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.990985] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1097.991242] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9e79840-81b8-4bee-8772-07dc429cbb7f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.993438] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1097.993614] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1097.994579] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50a1703d-577f-4c40-86e6-ba5c3b51962f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.999379] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for the task: (returnval){ [ 1097.999379] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52e6eebc-3b55-f680-ddf5-b790007a4e3f" [ 1097.999379] env[67424]: _type = "Task" [ 1097.999379] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.006540] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52e6eebc-3b55-f680-ddf5-b790007a4e3f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.063429] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1098.063652] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1098.063887] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleting the datastore file [datastore2] bb35e63a-3fb6-4a2a-8037-3fcc16def092 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.064184] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13aea232-3f78-402e-8a3d-ba0f17ec9698 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.070184] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for the task: (returnval){ [ 1098.070184] env[67424]: value = "task-3199988" [ 1098.070184] env[67424]: _type = "Task" [ 1098.070184] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.078406] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199988, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.510460] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1098.510817] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Creating directory with path [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.510955] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d77cd580-55c8-44ce-b6bd-4630bc76a88f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.523028] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Created directory with path [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.523254] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Fetch image to [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1098.523426] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1098.524229] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abee8d33-5d6d-435c-bea2-4ac0119db084 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.531475] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33268ecf-489d-4bd7-8f0a-8708204492cb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.541385] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa18c179-8d01-4a2e-bea2-9e3d8628b691 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.576932] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b5a706-c071-4523-ab0a-5b26efe15f8f {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.584900] env[67424]: DEBUG oslo_vmware.api [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Task: {'id': task-3199988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075227} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.588021] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.588388] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1098.588582] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1098.588765] env[67424]: INFO nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Took 0.61 seconds to destroy the instance on the hypervisor. 
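[editor's note] The records around this point show oslo.vmware's task-polling loop end to end: CopyVirtualDisk_Task (task-3199986) and DeleteDatastoreFile_Task (task-3199988) are created, polled while "progress is 0%", and finally either reported as completed successfully (with duration_secs, as just above) or raised as a translated fault such as InvalidArgument. As a rough illustration only — this is not oslo.vmware's actual code; the helper name and TaskInfo fields are simplified assumptions — the loop behaves like:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # Illustrative sketch of the wait_for_task/_poll_task behaviour seen in
    # this log (oslo_vmware/api.py); not the library's real implementation.
    while True:
        info = get_task_info()  # one property read per poll, as logged above
        if info.state in ('queued', 'running'):
            # Matches records like "Task: {'id': task-3199988, ...} progress is 0%."
            print(f"Task: {info.key} progress is {info.progress}%.")
        elif info.state == 'success':
            return info  # surfaces as "completed successfully" with duration_secs
        else:
            # state == 'error': the fault is translated and re-raised; this is
            # the path behind the earlier "A specified parameter was not
            # correct: fileType / Faults: ['InvalidArgument']" traceback.
            raise RuntimeError(info.error)
        time.sleep(poll_interval)

[end editor's note]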
[ 1098.591103] env[67424]: DEBUG nova.compute.claims [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1098.591372] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.591502] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.594282] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dbbae0ff-282c-4f18-b990-62277bf5f0f0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.617215] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1098.689956] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1098.755677] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1098.755943] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1098.942788] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4d984c4e-5f04-4592-bd6b-5c4f1117ef50 tempest-ServersAdminNegativeTestJSON-646082912 tempest-ServersAdminNegativeTestJSON-646082912-project-member] Acquiring lock "9eeec7b3-1a9f-4783-a84e-970d5a85129e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.943082] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4d984c4e-5f04-4592-bd6b-5c4f1117ef50 tempest-ServersAdminNegativeTestJSON-646082912 tempest-ServersAdminNegativeTestJSON-646082912-project-member] Lock "9eeec7b3-1a9f-4783-a84e-970d5a85129e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.181872] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d10b47-910a-4a51-9e87-aaeb0fd0e337 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.189622] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d0396a-1575-49c4-96c6-b47677083d5b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.220963] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a9700a-1c3c-44ef-8a25-b46511a56ec7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.228687] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b2b6de-4bdb-42f5-9681-ce4758bd8e6f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.242780] env[67424]: DEBUG nova.compute.provider_tree [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.251742] env[67424]: DEBUG nova.scheduler.client.report [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1099.268513] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.677s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.268996] env[67424]: ERROR nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1099.268996] env[67424]: Faults: ['InvalidArgument'] [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Traceback (most recent call last): [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self.driver.spawn(context, instance, image_meta, [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self._fetch_image_if_missing(context, vi) [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] image_cache(vi, tmp_image_ds_loc) [ 1099.268996] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] vm_util.copy_virtual_disk( [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] session._wait_for_task(vmdk_copy_task) [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return self.wait_for_task(task_ref) [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return evt.wait() [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] result = hub.switch() [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] return self.greenlet.switch() [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1099.269725] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] self.f(*self.args, **self.kw) [ 1099.270104] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1099.270104] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] raise exceptions.translate_fault(task_info.error) [ 1099.270104] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1099.270104] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Faults: ['InvalidArgument'] [ 1099.270104] env[67424]: ERROR nova.compute.manager [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] [ 1099.270104] env[67424]: DEBUG nova.compute.utils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1099.271293] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Build of instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 was re-scheduled: A specified parameter was not correct: fileType [ 1099.271293] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1099.271818] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1099.272107] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1099.272301] env[67424]: DEBUG nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1099.272624] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1099.459329] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.531434] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.848278] env[67424]: DEBUG nova.network.neutron [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.862165] env[67424]: INFO nova.compute.manager [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Took 0.59 seconds to deallocate network for instance. 
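The traceback recorded above bottoms out in oslo.vmware's task polling: session._wait_for_task delegates to api.wait_for_task, which blocks on an event while a looping call drives _poll_task, and a vCenter task that finishes in the error state has its fault translated into the VimFaultException ('InvalidArgument' on fileType) that _build_and_run_instance then catches and turns into the reschedule logged here. A minimal sketch of the translation step, using only the call sites the traceback itself names; the single-poll helper is an illustration, not the library's verbatim loop:

    # Sketch of the fault-translation step at oslo_vmware/api.py:448 in the
    # traceback above. Illustrative only: the real _poll_task runs inside a
    # loopingcall green thread and signals an Event when the task finishes.
    from oslo_vmware import exceptions

    def poll_once(task_info):
        """Handle one finished vCenter TaskInfo the way _poll_task does."""
        if task_info.state == 'success':
            return task_info
        if task_info.state == 'error':
            # The line shown in the traceback: the task's fault (here
            # InvalidArgument on fileType) becomes a VimFaultException.
            raise exceptions.translate_fault(task_info.error)
        # Any other state (queued/running) means poll again on the next tick.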
[ 1099.975224] env[67424]: INFO nova.scheduler.client.report [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Deleted allocations for instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 [ 1099.994945] env[67424]: DEBUG oslo_concurrency.lockutils [None req-69227aad-f6b4-4f19-86ed-7630cca994e3 tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 437.532s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.996309] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 237.532s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.996570] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Acquiring lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.997012] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.997248] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.999640] env[67424]: INFO nova.compute.manager [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Terminating instance [ 1100.003037] env[67424]: DEBUG nova.compute.manager [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1100.003122] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1100.003334] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bc9c433-1a89-4d7c-a819-87a8e47e16b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.011750] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1100.020023] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23b6567-5d2f-4449-9edf-c31acd0262a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.050190] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb35e63a-3fb6-4a2a-8037-3fcc16def092 could not be found. [ 1100.050360] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1100.050541] env[67424]: INFO nova.compute.manager [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1100.050795] env[67424]: DEBUG oslo.service.loopingcall [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.053010] env[67424]: DEBUG nova.compute.manager [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1100.053119] env[67424]: DEBUG nova.network.neutron [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1100.066811] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.067206] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.068568] env[67424]: INFO nova.compute.claims [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.099915] env[67424]: DEBUG nova.network.neutron [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.117370] env[67424]: INFO nova.compute.manager [-] [instance: bb35e63a-3fb6-4a2a-8037-3fcc16def092] Took 0.06 seconds to deallocate network for instance. 
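Every Acquiring/acquired/released triplet in these entries (the "compute_resources" claim lock just above, the per-instance build and terminate locks elsewhere) is emitted by oslo.concurrency's lockutils wrapper, which times how long the caller waited for the named lock and how long the decorated body held it. A minimal reproduction of the pattern, assuming only the public synchronized decorator; the function body is a placeholder, not Nova's resource tracker:

    # Reproduces the lockutils logging pattern seen throughout this log.
    # 'compute_resources' matches the lock name above; the body is a stub.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # While this body runs, the named lock is held; lockutils' inner
        # wrapper logs 'acquired ... waited Ns' on entry and
        # 'released ... held Ns' on exit, matching the DEBUG lines above.
        pass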
[ 1100.230847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-836d1d49-df5b-4d2c-959a-da8eb3aa7d3b tempest-ListServerFiltersTestJSON-1749227928 tempest-ListServerFiltersTestJSON-1749227928-project-member] Lock "bb35e63a-3fb6-4a2a-8037-3fcc16def092" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.234s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.501519] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbdab0c-a2d3-471c-92f2-616518edce41 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.509556] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53814ca-03a7-491c-9a88-60050971a419 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.540175] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78dfe31-406e-4547-a8eb-3d6fa36db4c2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.547473] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ebc609-2d57-4c1d-81a7-36823dec52e9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.560901] env[67424]: DEBUG nova.compute.provider_tree [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.570009] env[67424]: DEBUG nova.scheduler.client.report [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1100.583601] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.517s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.584124] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1100.617229] env[67424]: DEBUG nova.compute.utils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1100.618978] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1100.618978] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1100.630279] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1100.693265] env[67424]: DEBUG nova.policy [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48d5393c8dd44b8980673424bf58c3ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb47f8f5ec7645adbd5f78719656f0e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1100.696434] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1100.724219] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1100.724567] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1100.724802] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.725086] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1100.725315] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.725540] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1100.725837] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1100.726116] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1100.726373] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1100.726732] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1100.726875] env[67424]: DEBUG nova.virt.hardware [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1100.728090] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fb4173-b2e4-4f19-bb6c-acd23960305b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.738830] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44dd6c6-0e47-4042-bee4-0e8ac1decc79 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.393286] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Successfully created port: 7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.273145] env[67424]: DEBUG nova.compute.manager [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Received event network-vif-plugged-7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1102.273145] env[67424]: DEBUG oslo_concurrency.lockutils [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] Acquiring lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.273145] env[67424]: DEBUG oslo_concurrency.lockutils [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.273145] env[67424]: DEBUG oslo_concurrency.lockutils [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.273598] env[67424]: DEBUG nova.compute.manager [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] No waiting events found dispatching network-vif-plugged-7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1102.273598] env[67424]: WARNING nova.compute.manager [req-c13b2417-1846-4f84-b779-0e6d4d604ca8 req-72445ef2-de53-4012-a578-43f41b5a869b service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Received unexpected event network-vif-plugged-7851a046-a6e4-4de0-b880-abc0e22ba7c1 for instance with vm_state building and task_state spawning. [ 1102.420242] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Successfully updated port: 7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1102.432180] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.432180] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.432587] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1102.496959] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1102.733586] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Updating instance_info_cache with network_info: [{"id": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "address": "fa:16:3e:4f:4b:a2", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851a046-a6", "ovs_interfaceid": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.747051] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.747364] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance network_info: |[{"id": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "address": "fa:16:3e:4f:4b:a2", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851a046-a6", "ovs_interfaceid": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1102.747771] env[67424]: 
DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:4b:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7851a046-a6e4-4de0-b880-abc0e22ba7c1', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1102.755338] env[67424]: DEBUG oslo.service.loopingcall [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1102.755840] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1102.756122] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-285510c2-d8dd-4fa2-a231-8f7a9e51a4e7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.777241] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1102.777241] env[67424]: value = "task-3199989" [ 1102.777241] env[67424]: _type = "Task" [ 1102.777241] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.784899] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199989, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.036094] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.289122] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199989, 'name': CreateVM_Task, 'duration_secs': 0.288057} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.289122] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1103.289122] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.289725] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.289725] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1103.289803] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce74f4e1-7bf9-4128-bf7d-a2cba8740cfd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.294382] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 1103.294382] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5219efe7-b4ed-7125-d3ba-9cfda1e7c591" [ 1103.294382] env[67424]: _type = "Task" [ 1103.294382] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.303028] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5219efe7-b4ed-7125-d3ba-9cfda1e7c591, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.804517] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.804773] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.804989] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.350163] env[67424]: DEBUG nova.compute.manager [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Received event network-changed-7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1104.350408] env[67424]: DEBUG nova.compute.manager [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Refreshing instance network info cache due to event network-changed-7851a046-a6e4-4de0-b880-abc0e22ba7c1. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1104.350606] env[67424]: DEBUG oslo_concurrency.lockutils [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] Acquiring lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.350743] env[67424]: DEBUG oslo_concurrency.lockutils [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] Acquired lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.350903] env[67424]: DEBUG nova.network.neutron [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Refreshing network info cache for port 7851a046-a6e4-4de0-b880-abc0e22ba7c1 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1104.957741] env[67424]: DEBUG nova.network.neutron [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Updated VIF entry in instance network info cache for port 7851a046-a6e4-4de0-b880-abc0e22ba7c1. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1104.958153] env[67424]: DEBUG nova.network.neutron [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Updating instance_info_cache with network_info: [{"id": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "address": "fa:16:3e:4f:4b:a2", "network": {"id": "27565a97-13c7-423d-b01b-5cee8fa0b0ca", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0b2360eb1420490db3cb04c00583f0da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851a046-a6", "ovs_interfaceid": "7851a046-a6e4-4de0-b880-abc0e22ba7c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.969860] env[67424]: DEBUG oslo_concurrency.lockutils [req-4ec29ff5-90c6-429d-a0b0-85d9e6d7fc73 req-83f8ded0-dad5-4fea-a00e-88bb33533697 service nova] Releasing lock "refresh_cache-4e370013-5dfb-467c-8709-c0a0b256a9aa" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.822416] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.822942] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.706246] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5991818-0e9b-4218-a1da-29233d59e07c tempest-ServerMetadataTestJSON-543726691 tempest-ServerMetadataTestJSON-543726691-project-member] Acquiring lock "28e7dff8-b59f-4357-a5b7-48e713d59fac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.706246] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5991818-0e9b-4218-a1da-29233d59e07c tempest-ServerMetadataTestJSON-543726691 tempest-ServerMetadataTestJSON-543726691-project-member] Lock 
"28e7dff8-b59f-4357-a5b7-48e713d59fac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.989896] env[67424]: DEBUG oslo_concurrency.lockutils [None req-412016ff-7c3d-4ba9-be74-8c0d736ec4b1 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Acquiring lock "2ad72ba1-1f79-4a2c-b411-3e0f51ca342b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.990200] env[67424]: DEBUG oslo_concurrency.lockutils [None req-412016ff-7c3d-4ba9-be74-8c0d736ec4b1 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "2ad72ba1-1f79-4a2c-b411-3e0f51ca342b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.111979] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0616d565-5ce4-4790-b209-62be7a7a8eac tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Acquiring lock "654172d5-94b4-427b-930d-7e8d1fa31d36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.112291] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0616d565-5ce4-4790-b209-62be7a7a8eac tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Lock "654172d5-94b4-427b-930d-7e8d1fa31d36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.889021] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3519e644-fb6a-4107-bfcf-8816187984c4 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Acquiring lock "b12975c8-0bce-41b6-afae-f78d34a9309f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.889021] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3519e644-fb6a-4107-bfcf-8816187984c4 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "b12975c8-0bce-41b6-afae-f78d34a9309f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.828136] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7f26083d-403e-4ff7-a68d-0486726951e6 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Acquiring lock "784fdc06-dfeb-403b-b0b6-38399a4cf972" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.828460] 
env[67424]: DEBUG oslo_concurrency.lockutils [None req-7f26083d-403e-4ff7-a68d-0486726951e6 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Lock "784fdc06-dfeb-403b-b0b6-38399a4cf972" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.347143] env[67424]: DEBUG oslo_concurrency.lockutils [None req-128e19a6-605b-4ff1-baf8-62d15cb44586 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Acquiring lock "fa2a9d05-598d-41ab-9b74-a3b50d49777d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.347143] env[67424]: DEBUG oslo_concurrency.lockutils [None req-128e19a6-605b-4ff1-baf8-62d15cb44586 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Lock "fa2a9d05-598d-41ab-9b74-a3b50d49777d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.490549] env[67424]: WARNING oslo_vmware.rw_handles [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1147.490549] env[67424]: ERROR oslo_vmware.rw_handles [ 1147.491168] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1147.492953] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 
tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1147.498255] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Copying Virtual Disk [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/04fed971-609d-4c92-bb6e-0ec56e96935b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1147.498458] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-843434c6-0763-46cc-b723-ca2ae1ae82f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.510385] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for the task: (returnval){ [ 1147.510385] env[67424]: value = "task-3199990" [ 1147.510385] env[67424]: _type = "Task" [ 1147.510385] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.516535] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Task: {'id': task-3199990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.877693] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fbc43216-8dfa-4a4e-9957-4a0af7e52239 tempest-ServersNegativeTestJSON-1738428696 tempest-ServersNegativeTestJSON-1738428696-project-member] Acquiring lock "2f42c7da-46a0-4ae2-9ac9-92527183814b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.877968] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fbc43216-8dfa-4a4e-9957-4a0af7e52239 tempest-ServersNegativeTestJSON-1738428696 tempest-ServersNegativeTestJSON-1738428696-project-member] Lock "2f42c7da-46a0-4ae2-9ac9-92527183814b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.020767] env[67424]: DEBUG oslo_vmware.exceptions [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1148.021385] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.022137] env[67424]: ERROR nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1148.022137] env[67424]: Faults: ['InvalidArgument'] [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Traceback (most recent call last): [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] yield resources [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self.driver.spawn(context, instance, image_meta, [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self._fetch_image_if_missing(context, vi) [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1148.022137] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] image_cache(vi, tmp_image_ds_loc) [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] vm_util.copy_virtual_disk( [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] session._wait_for_task(vmdk_copy_task) [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return self.wait_for_task(task_ref) [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return evt.wait() [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] result = hub.switch() [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return self.greenlet.switch() [ 1148.022666] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self.f(*self.args, **self.kw) [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] raise exceptions.translate_fault(task_info.error) [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Faults: ['InvalidArgument'] [ 1148.023090] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] [ 1148.023090] env[67424]: INFO nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Terminating instance [ 1148.024754] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.025053] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1148.025782] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 
91fdd93f-a6ef-44ad-b842-6d9b3173e626] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1148.026014] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1148.026299] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-665d3a45-ee67-48cc-90b6-f49ac0261d86 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.029295] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d6ec30-501d-4081-aac0-9ed5cbd4afcf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.039777] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1148.040225] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c992129e-93e8-42a3-a192-25cbeb2fa284 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.043656] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1148.043851] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1148.045182] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-223fb26a-4985-4b25-9fc4-553948440dfc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.051045] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for the task: (returnval){ [ 1148.051045] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d3da4c-b174-b672-daf5-3521ab47a8bc" [ 1148.051045] env[67424]: _type = "Task" [ 1148.051045] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.061375] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52d3da4c-b174-b672-daf5-3521ab47a8bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.109907] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1148.110252] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1148.110367] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Deleting the datastore file [datastore2] 91fdd93f-a6ef-44ad-b842-6d9b3173e626 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1148.110651] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a54ac6d0-667a-4a17-8454-b05d297eba39 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.117140] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for the task: (returnval){ [ 1148.117140] env[67424]: value = "task-3199992" [ 1148.117140] env[67424]: _type = "Task" [ 1148.117140] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.124822] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Task: {'id': task-3199992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.387468] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.387645] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1148.561980] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1148.562315] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Creating directory with path [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1148.562848] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0345820-4c05-44ef-b6c6-be1230e2d0f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.574084] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Created directory with path [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1148.574280] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Fetch image to [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1148.574448] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1148.575311] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1f9963-24d2-42d5-9cdd-df98e13933c9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.581936] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8dfd48-5314-4dcb-aa2b-d24e1b6b9bbd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.591933] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019ad4a3-0121-4529-9778-32e112da4a08 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.625090] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-daf939d6-361b-4e2f-8422-93d7878b04ce {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.631803] env[67424]: DEBUG oslo_vmware.api [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Task: {'id': task-3199992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079073} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.633155] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.633350] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1148.633525] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1148.633694] env[67424]: INFO nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Took 0.61 seconds to destroy the instance on the hypervisor. 
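The DeleteDatastoreFile_Task entries above show the task life cycle this driver leans on everywhere: invoke a vCenter method that returns a Task, poll it (progress is 0% ... completed successfully, with duration_secs recorded), and on failure translate the task's fault into an exception, which is exactly how the earlier copy task surfaced as VimFaultException with Faults: ['InvalidArgument']. A minimal, self-contained Python sketch of that poll loop; get_task_info and TaskFault are hypothetical stand-ins, not the oslo.vmware API:

import time

class TaskFault(Exception):
    """Stand-in for a translated VMware task fault (e.g. InvalidArgument)."""

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    """Poll a VMware-style task until it succeeds, fails or times out.

    get_task_info(task_ref) is assumed to return an object with .state in
    {'queued', 'running', 'success', 'error'}, plus .progress and .error.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                      # caller reads the task result
        if info.state == "error":
            raise TaskFault(info.error)      # fault -> exception, as in the log
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(interval)                 # still queued/running; poll again
    raise TimeoutError(f"Task {task_ref} did not finish within {timeout}s")

In the log the equivalent loop runs on an eventlet looping call instead of blocking the thread, which is why the traceback above passes through eventlet's hub before reaching _poll_task.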
[ 1148.635546] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-31b56bcf-9f7b-4ac8-9431-60ac894842bc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.637410] env[67424]: DEBUG nova.compute.claims [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1148.637581] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.637798] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.658996] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1148.713528] env[67424]: DEBUG oslo_vmware.rw_handles [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1148.775824] env[67424]: DEBUG oslo_vmware.rw_handles [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1148.776033] env[67424]: DEBUG oslo_vmware.rw_handles [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1149.121686] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76aeb096-3a13-43a3-9d58-4c2c757212c6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.129886] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd1c3a9-815d-4b70-b7d3-11c0ab42d2a7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.165889] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696ecf28-b4e7-4d4a-93a6-52906cd86429 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.174044] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98e0e86-f1b3-4e04-9557-53b58890f506 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.186227] env[67424]: DEBUG nova.compute.provider_tree [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.197230] env[67424]: DEBUG nova.scheduler.client.report [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1149.213286] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.575s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.213801] env[67424]: ERROR nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1149.213801] env[67424]: Faults: ['InvalidArgument'] [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Traceback (most recent call last): [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 
91fdd93f-a6ef-44ad-b842-6d9b3173e626] self.driver.spawn(context, instance, image_meta, [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self._fetch_image_if_missing(context, vi) [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] image_cache(vi, tmp_image_ds_loc) [ 1149.213801] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] vm_util.copy_virtual_disk( [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] session._wait_for_task(vmdk_copy_task) [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return self.wait_for_task(task_ref) [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return evt.wait() [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] result = hub.switch() [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] return self.greenlet.switch() [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1149.214181] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] self.f(*self.args, **self.kw) [ 1149.214543] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1149.214543] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] raise exceptions.translate_fault(task_info.error) [ 1149.214543] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1149.214543] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Faults: ['InvalidArgument'] [ 1149.214543] env[67424]: ERROR nova.compute.manager [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] [ 1149.214543] env[67424]: DEBUG nova.compute.utils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1149.215917] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Build of instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 was re-scheduled: A specified parameter was not correct: fileType [ 1149.215917] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1149.216305] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1149.216478] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1149.216646] env[67424]: DEBUG nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1149.216826] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1149.857747] env[67424]: DEBUG nova.network.neutron [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.868839] env[67424]: INFO nova.compute.manager [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Took 0.65 seconds to deallocate network for instance. [ 1149.976970] env[67424]: INFO nova.scheduler.client.report [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Deleted allocations for instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 [ 1149.998524] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2cac8d3b-946c-4bcc-a15c-8c2c683a248d tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 481.595s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.000238] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 283.248s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.000604] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Acquiring lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.000840] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.001019] env[67424]:
DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.004710] env[67424]: INFO nova.compute.manager [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Terminating instance [ 1150.006824] env[67424]: DEBUG nova.compute.manager [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1150.007046] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1150.007304] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef10217a-1738-4233-b129-1e21c9ea6036 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.011068] env[67424]: DEBUG nova.compute.manager [None req-cb44eba5-e7db-4acd-97a7-d40713e7eac6 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 72480e41-88d7-4986-86fd-7d98aa82196b] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.017918] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a493bf-d3e0-4e9c-a893-01bb817fbe42 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.049841] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91fdd93f-a6ef-44ad-b842-6d9b3173e626 could not be found. [ 1150.050125] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.050484] env[67424]: INFO nova.compute.manager [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Took 0.04 seconds to destroy the instance on the hypervisor.
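The WARNING above explains why this second terminate still succeeds: the first destroy already removed the backend VM, so "not found" is treated as "already gone" and the rest of the teardown (network deallocation, allocation cleanup) proceeds, making deletion idempotent. A rough sketch of that shape, using a hypothetical vm_api facade rather than the real vmops code:

import logging

log = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_instance(vm_api, instance_uuid):
    """Tear down a VM, treating 'not found' as already destroyed."""
    try:
        vm_ref = vm_api.find_by_uuid(instance_uuid)   # hypothetical lookup
        vm_api.unregister(vm_ref)                     # cf. UnregisterVM above
        vm_api.delete_files(instance_uuid)            # cf. DeleteDatastoreFile_Task
    except InstanceNotFound:
        # A failed spawn may already have removed the backend VM; swallowing
        # the error lets claim, quota and network cleanup still run.
        log.warning("Instance does not exist on backend: %s", instance_uuid)
    log.info("Instance destroyed")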
[ 1150.050768] env[67424]: DEBUG oslo.service.loopingcall [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.051586] env[67424]: DEBUG nova.compute.manager [None req-cb44eba5-e7db-4acd-97a7-d40713e7eac6 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 72480e41-88d7-4986-86fd-7d98aa82196b] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.053424] env[67424]: DEBUG nova.compute.manager [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1150.053557] env[67424]: DEBUG nova.network.neutron [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.077391] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cb44eba5-e7db-4acd-97a7-d40713e7eac6 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "72480e41-88d7-4986-86fd-7d98aa82196b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 239.914s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.081389] env[67424]: DEBUG nova.network.neutron [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.087033] env[67424]: DEBUG nova.compute.manager [None req-f366d4dc-d7ce-4bf2-aad5-ce7f6843ba49 tempest-ServerRescueTestJSONUnderV235-1011947938 tempest-ServerRescueTestJSONUnderV235-1011947938-project-member] [instance: 54dc90d0-4f6a-4b16-b1af-dc8c74aef382] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.090492] env[67424]: INFO nova.compute.manager [-] [instance: 91fdd93f-a6ef-44ad-b842-6d9b3173e626] Took 0.04 seconds to deallocate network for instance. [ 1150.110362] env[67424]: DEBUG nova.compute.manager [None req-f366d4dc-d7ce-4bf2-aad5-ce7f6843ba49 tempest-ServerRescueTestJSONUnderV235-1011947938 tempest-ServerRescueTestJSONUnderV235-1011947938-project-member] [instance: 54dc90d0-4f6a-4b16-b1af-dc8c74aef382] Instance disappeared before build.
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.141662] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f366d4dc-d7ce-4bf2-aad5-ce7f6843ba49 tempest-ServerRescueTestJSONUnderV235-1011947938 tempest-ServerRescueTestJSONUnderV235-1011947938-project-member] Lock "54dc90d0-4f6a-4b16-b1af-dc8c74aef382" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 234.184s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.151494] env[67424]: DEBUG nova.compute.manager [None req-42b670cb-a699-4194-9ee6-20d80a9d0b11 tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] [instance: 3ede76b6-042e-496f-aea7-a1c42166827f] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.174566] env[67424]: DEBUG nova.compute.manager [None req-42b670cb-a699-4194-9ee6-20d80a9d0b11 tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] [instance: 3ede76b6-042e-496f-aea7-a1c42166827f] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.195118] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f405cb6c-7f06-4488-a0fa-22dcde471474 tempest-ServersTestJSON-393926358 tempest-ServersTestJSON-393926358-project-member] Lock "91fdd93f-a6ef-44ad-b842-6d9b3173e626" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.195s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.212570] env[67424]: DEBUG oslo_concurrency.lockutils [None req-42b670cb-a699-4194-9ee6-20d80a9d0b11 tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Lock "3ede76b6-042e-496f-aea7-a1c42166827f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 226.146s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.221786] env[67424]: DEBUG nova.compute.manager [None req-7ad2c225-49f3-4b9e-b565-7b9d88281994 tempest-FloatingIPsAssociationNegativeTestJSON-122126100 tempest-FloatingIPsAssociationNegativeTestJSON-122126100-project-member] [instance: 614a4c23-3aee-4dd4-9ca7-534584122c00] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.269296] env[67424]: DEBUG nova.compute.manager [None req-7ad2c225-49f3-4b9e-b565-7b9d88281994 tempest-FloatingIPsAssociationNegativeTestJSON-122126100 tempest-FloatingIPsAssociationNegativeTestJSON-122126100-project-member] [instance: 614a4c23-3aee-4dd4-9ca7-534584122c00] Instance disappeared before build.
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.291671] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ad2c225-49f3-4b9e-b565-7b9d88281994 tempest-FloatingIPsAssociationNegativeTestJSON-122126100 tempest-FloatingIPsAssociationNegativeTestJSON-122126100-project-member] Lock "614a4c23-3aee-4dd4-9ca7-534584122c00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.452s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.304228] env[67424]: DEBUG nova.compute.manager [None req-45c38e4d-7aa2-4c86-aa60-b3c0741808b6 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: c0b7de50-17ad-4f8e-9887-345ca08e9d33] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.333061] env[67424]: DEBUG nova.compute.manager [None req-45c38e4d-7aa2-4c86-aa60-b3c0741808b6 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: c0b7de50-17ad-4f8e-9887-345ca08e9d33] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.357181] env[67424]: DEBUG oslo_concurrency.lockutils [None req-45c38e4d-7aa2-4c86-aa60-b3c0741808b6 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "c0b7de50-17ad-4f8e-9887-345ca08e9d33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.259s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.367414] env[67424]: DEBUG nova.compute.manager [None req-72673e12-b998-4ea2-8c84-88bb7ed7a516 tempest-ServerPasswordTestJSON-1344184811 tempest-ServerPasswordTestJSON-1344184811-project-member] [instance: 38255062-f950-4b14-90e1-96a30ce2843e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.387659] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.390953] env[67424]: DEBUG nova.compute.manager [None req-72673e12-b998-4ea2-8c84-88bb7ed7a516 tempest-ServerPasswordTestJSON-1344184811 tempest-ServerPasswordTestJSON-1344184811-project-member] [instance: 38255062-f950-4b14-90e1-96a30ce2843e] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.412578] env[67424]: DEBUG oslo_concurrency.lockutils [None req-72673e12-b998-4ea2-8c84-88bb7ed7a516 tempest-ServerPasswordTestJSON-1344184811 tempest-ServerPasswordTestJSON-1344184811-project-member] Lock "38255062-f950-4b14-90e1-96a30ce2843e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 208.008s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.421477] env[67424]: DEBUG nova.compute.manager [None req-0aeb2f04-8dca-4bd1-839a-58b32d3acbb6 tempest-ServerRescueTestJSON-528481978 tempest-ServerRescueTestJSON-528481978-project-member] [instance: 1e5007de-23df-4c65-9210-f460ed564216] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.449252] env[67424]: DEBUG nova.compute.manager [None req-0aeb2f04-8dca-4bd1-839a-58b32d3acbb6 tempest-ServerRescueTestJSON-528481978 tempest-ServerRescueTestJSON-528481978-project-member] [instance: 1e5007de-23df-4c65-9210-f460ed564216] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.470414] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0aeb2f04-8dca-4bd1-839a-58b32d3acbb6 tempest-ServerRescueTestJSON-528481978 tempest-ServerRescueTestJSON-528481978-project-member] Lock "1e5007de-23df-4c65-9210-f460ed564216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.104s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.479469] env[67424]: DEBUG nova.compute.manager [None req-0fa9ed84-8ebf-4c02-920f-9e7f5f5f19ef tempest-ServerActionsTestOtherB-1172401824 tempest-ServerActionsTestOtherB-1172401824-project-member] [instance: cfef8a4c-abc4-4003-a932-e2f823c84e3e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.502636] env[67424]: DEBUG nova.compute.manager [None req-0fa9ed84-8ebf-4c02-920f-9e7f5f5f19ef tempest-ServerActionsTestOtherB-1172401824 tempest-ServerActionsTestOtherB-1172401824-project-member] [instance: cfef8a4c-abc4-4003-a932-e2f823c84e3e] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1150.522529] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0fa9ed84-8ebf-4c02-920f-9e7f5f5f19ef tempest-ServerActionsTestOtherB-1172401824 tempest-ServerActionsTestOtherB-1172401824-project-member] Lock "cfef8a4c-abc4-4003-a932-e2f823c84e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.059s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.530582] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1150.581249] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.581503] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.583045] env[67424]: INFO nova.compute.claims [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.975127] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceb750b-7212-4b37-acc1-93a491c970eb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.982953] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3db0ac-4ed2-4357-b456-395f2587ef7f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.012917] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c6e5a8-d126-4f76-906b-67dd988c332f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.019819] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd9b2d6-3903-42da-b56e-48e8f9c967b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.693828] env[67424]: DEBUG nova.compute.provider_tree [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.703864] env[67424]: DEBUG nova.scheduler.client.report [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1151.720810] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.139s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.721382] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1151.760010] env[67424]: DEBUG nova.compute.utils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1151.761348] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1151.761742] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1151.770973] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1151.842478] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1151.850583] env[67424]: DEBUG nova.policy [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a220159ce8c745c7bfb5969bc1edb7eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b3f18f20364a5ca6f6c788baf738b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1151.870223] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=<?>,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:15:51Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1151.870475] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1151.870631] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.870806] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1151.870948] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.871113] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1151.871323] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1151.871479] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1151.871643] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1151.871800] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1151.871967] env[67424]: DEBUG nova.virt.hardware [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1151.872851] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdcccca-2ffd-4e7e-b38b-73767622bed6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.881653] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075d3a7c-fcba-4e24-a30a-51b727bf695d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.383102] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.395808] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Successfully created port: 914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1153.388865] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.388865] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.487562] env[67424]: DEBUG nova.compute.manager [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Received event network-vif-plugged-914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1153.487804] env[67424]: DEBUG oslo_concurrency.lockutils [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] Acquiring lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.488185] env[67424]: DEBUG oslo_concurrency.lockutils [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.488408] env[67424]: DEBUG oslo_concurrency.lockutils [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.488581] env[67424]: DEBUG nova.compute.manager [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] No waiting events found dispatching network-vif-plugged-914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1153.488748] env[67424]: WARNING nova.compute.manager [req-8fc84355-9d34-4c77-a67e-27237bd78964 req-ca209b64-f70f-4f76-b260-e7459fa71342 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Received unexpected event network-vif-plugged-914bcb9c-eea6-4178-bea2-4d6698e2930e for instance with vm_state building and task_state spawning.
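These entries show the external-event rendezvous between Neutron and the compute manager: an incoming network-vif-plugged event briefly takes the per-instance "-events" lock and tries to pop a registered waiter; because spawn has not started waiting for the VIF yet, no waiter exists and the event is logged as unexpected (harmless here, since the instance is still building). A toy pop-or-warn registry along those lines, illustrative only and not Nova's implementation:

import threading
from collections import defaultdict

class EventRegistry:
    """Map instance_uuid -> {event_name: threading.Event} (toy version)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the operation."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop_and_signal(self, instance_uuid, event_name):
        """Deliver an incoming event; False means nobody was waiting yet."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()                              # wake the waiter (e.g. spawn)
        return True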
[ 1153.678504] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Successfully updated port: 914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1153.695603] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.695603] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquired lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.695603] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1153.748276] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1154.085055] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Updating instance_info_cache with network_info: [{"id": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "address": "fa:16:3e:d2:85:ba", "network": {"id": "03ff4e8f-f414-417d-8dea-6ceb7673b703", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1673153373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b3f18f20364a5ca6f6c788baf738b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914bcb9c-ee", "ovs_interfaceid": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.098874] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Releasing lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.098874] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance network_info: |[{"id": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "address": "fa:16:3e:d2:85:ba", "network": {"id": "03ff4e8f-f414-417d-8dea-6ceb7673b703", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1673153373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b3f18f20364a5ca6f6c788baf738b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914bcb9c-ee", "ovs_interfaceid": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1154.099194] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:85:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '914bcb9c-eea6-4178-bea2-4d6698e2930e', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1154.105623] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Creating folder: Project (82b3f18f20364a5ca6f6c788baf738b9). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1154.106509] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66edb06b-4397-4820-b36e-43da401a2f91 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.118613] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Created folder: Project (82b3f18f20364a5ca6f6c788baf738b9) in parent group-v639843. [ 1154.118803] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Creating folder: Instances. Parent ref: group-v639906. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1154.119049] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14325f7f-7e8d-490f-a33d-83e5cc04af8f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.129045] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Created folder: Instances in parent group-v639906. [ 1154.129045] env[67424]: DEBUG oslo.service.loopingcall [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1154.129045] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1154.129045] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce29ef21-16b8-4b65-ae2b-dbb141b4d88a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.147562] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1154.147562] env[67424]: value = "task-3199995" [ 1154.147562] env[67424]: _type = "Task" [ 1154.147562] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.155813] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199995, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.387885] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.388252] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.658290] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3199995, 'name': CreateVM_Task, 'duration_secs': 0.291895} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.658649] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1154.659175] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.659346] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.659666] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1154.659914] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb63d4a-309f-4e13-b24b-792a6bfcf463 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.664139] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for the task: (returnval){ [ 1154.664139] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]529e0f26-a29a-63e0-034c-6521660549f7" [ 1154.664139] env[67424]: _type = "Task" [ 1154.664139] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.673511] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]529e0f26-a29a-63e0-034c-6521660549f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.174491] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.174751] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1155.174999] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.387401] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.398776] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.399007] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.399236] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.399516] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1155.400663] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40976e26-3993-42bb-85be-8ed87982c34d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.409955] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbb635d-8d53-4d8e-90b5-4755bafd1144 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.425936] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2225c6b6-5611-40ef-a4dd-389f7f93354a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.432873] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240c300a-150c-4b3a-90af-ee556877a048 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.462454] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180983MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1155.462619] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.462815] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.542039] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542039] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542178] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542338] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542495] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542652] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542774] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.542889] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.543035] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.543265] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1155.546454] env[67424]: DEBUG nova.compute.manager [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Received event network-changed-914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1155.546637] env[67424]: DEBUG nova.compute.manager [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Refreshing instance network info cache due to event network-changed-914bcb9c-eea6-4178-bea2-4d6698e2930e. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1155.546844] env[67424]: DEBUG oslo_concurrency.lockutils [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] Acquiring lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.547088] env[67424]: DEBUG oslo_concurrency.lockutils [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] Acquired lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.547239] env[67424]: DEBUG nova.network.neutron [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Refreshing network info cache for port 914bcb9c-eea6-4178-bea2-4d6698e2930e {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1155.555037] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 85b0863a-17ed-4cad-8086-abc6f5755225 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.565690] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.577485] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a0c7d0b0-e26c-4cba-9a45-8e60486e3905 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.587438] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.597686] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.610184] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4b3afdb2-16fd-453c-b831-7ad5a0a74772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.624273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0e1ff97a-fcea-4a95-a9fb-d35797c914fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.634492] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2580169a-b80b-43fe-bd63-9a09723a691e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.644441] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.654878] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3525b872-dfbb-44d6-853d-8d0612cec3f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.664364] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.674056] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 9eeec7b3-1a9f-4783-a84e-970d5a85129e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.683345] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.692865] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 28e7dff8-b59f-4357-a5b7-48e713d59fac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.701835] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad72ba1-1f79-4a2c-b411-3e0f51ca342b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.711461] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 654172d5-94b4-427b-930d-7e8d1fa31d36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.720586] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance b12975c8-0bce-41b6-afae-f78d34a9309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.730605] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 784fdc06-dfeb-403b-b0b6-38399a4cf972 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.741218] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fa2a9d05-598d-41ab-9b74-a3b50d49777d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.752142] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2f42c7da-46a0-4ae2-9ac9-92527183814b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1155.752346] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1155.752491] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1155.835072] env[67424]: DEBUG nova.network.neutron [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Updated VIF entry in instance network info cache for port 914bcb9c-eea6-4178-bea2-4d6698e2930e. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1155.835493] env[67424]: DEBUG nova.network.neutron [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Updating instance_info_cache with network_info: [{"id": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "address": "fa:16:3e:d2:85:ba", "network": {"id": "03ff4e8f-f414-417d-8dea-6ceb7673b703", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1673153373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b3f18f20364a5ca6f6c788baf738b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914bcb9c-ee", "ovs_interfaceid": "914bcb9c-eea6-4178-bea2-4d6698e2930e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.844544] env[67424]: DEBUG oslo_concurrency.lockutils [req-efc96f38-1d7c-4ca9-af2e-b6402653220e req-5fb50a09-2306-49dc-8ae7-efbe4eeb1828 service nova] Releasing lock "refresh_cache-aa454838-2a3f-40a0-825d-1b3f2656a6a5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.098217] env[67424]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94203822-7447-4d9f-88e5-74d502877283 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.105572] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ed0dd8-c7f5-4d74-8301-384b146c5071 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.135375] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3efacd-d185-41a6-b795-69f58ca07a43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.142530] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a14aae-0222-45a9-a975-96ac4b0e28e6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.155481] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.164271] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1156.177405] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1156.177584] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.715s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.222011] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.178450] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.178834] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None 
None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1159.178834] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1159.204902] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205090] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205227] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205354] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205479] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205598] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205717] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205836] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.205953] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.206083] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1159.206209] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1198.200468] env[67424]: WARNING oslo_vmware.rw_handles [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1198.200468] env[67424]: ERROR oslo_vmware.rw_handles [ 1198.201090] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1198.203587] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1198.203853] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Copying Virtual Disk [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/3be8cea6-bf6a-4b20-b533-cf134197f8fd/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1198.204159] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8766e6c-374b-42b7-bc6d-af35f07aa7b9 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.211834] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for the task: (returnval){ [ 1198.211834] env[67424]: value = "task-3199996" [ 1198.211834] env[67424]: _type = "Task" [ 1198.211834] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.219851] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Task: {'id': task-3199996, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.721712] env[67424]: DEBUG oslo_vmware.exceptions [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1198.721839] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.722300] env[67424]: ERROR nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.722300] env[67424]: Faults: ['InvalidArgument'] [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Traceback (most recent call last): [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] yield resources [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self.driver.spawn(context, instance, image_meta, [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1198.722300] env[67424]: ERROR 
nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self._fetch_image_if_missing(context, vi) [ 1198.722300] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] image_cache(vi, tmp_image_ds_loc) [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] vm_util.copy_virtual_disk( [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] session._wait_for_task(vmdk_copy_task) [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return self.wait_for_task(task_ref) [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return evt.wait() [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] result = hub.switch() [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1198.722680] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return self.greenlet.switch() [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self.f(*self.args, **self.kw) [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] raise exceptions.translate_fault(task_info.error) [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Faults: ['InvalidArgument'] [ 1198.723020] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] [ 1198.723020] 
env[67424]: INFO nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Terminating instance [ 1198.724284] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.724486] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.724726] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a46a96e-06e1-4c79-bae5-6ed7b3a14e17 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.728094] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1198.728289] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.729044] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2161b97c-65c3-44f2-ba00-0ffd0815fcf7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.735464] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1198.735835] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83fbdcb2-befb-4388-9022-fabf5256d830 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.737909] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.738117] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1198.739042] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-109bf535-75c6-450d-889d-6b927ae5e750 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.743712] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for the task: (returnval){ [ 1198.743712] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dbb452-232f-4cad-cc8e-29b47916e58e" [ 1198.743712] env[67424]: _type = "Task" [ 1198.743712] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.756468] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dbb452-232f-4cad-cc8e-29b47916e58e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.806282] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1198.806495] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1198.806665] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Deleting the datastore file [datastore2] de7549c2-328b-4ab2-b590-c32f8a7d3261 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.806935] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6c6c9f2-6178-466e-853f-1459f682c977 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.813038] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for the task: (returnval){ [ 1198.813038] env[67424]: value = "task-3199998" [ 1198.813038] env[67424]: _type = "Task" [ 1198.813038] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.820594] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Task: {'id': task-3199998, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.255107] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1199.255402] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Creating directory with path [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.255652] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f4c64ff-c316-48f6-91cf-9e13171f1ee9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.266930] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Created directory with path [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.267135] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Fetch image to [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1199.267309] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1199.268104] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63c1d88-bc65-4147-91ba-50356d3d77c3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.275088] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da950334-83cf-4931-8d06-a69d32b637f6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.284084] env[67424]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1543b449-eb14-4ae8-b63e-5b8cc4d46d05 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.318423] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90c1369-cc7f-4f29-9ed1-de5150c3763a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.325278] env[67424]: DEBUG oslo_vmware.api [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Task: {'id': task-3199998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062428} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.326687] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.326888] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1199.327068] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1199.327248] env[67424]: INFO nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Took 0.60 seconds to destroy the instance on the hypervisor. 
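The destroy sequence above, like every vCenter call in this log, runs through oslo.vmware's wait_for_task/_poll_task loop: submit the task (here DeleteDatastoreFile_Task), poll its progress ("progress is 0%"), and either return on completion ("completed successfully") or translate the reported fault into an exception. A minimal sketch of that polling pattern, using only hypothetical stand-in names rather than the real oslo.vmware API:

    import time

    class TaskError(Exception):
        """Stand-in for the translated fault raised when a task ends in error."""

    def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
        # poll_fn() returns a dict such as
        #   {'state': 'running'|'success'|'error', 'progress': 0, 'error': None}
        # mirroring the "progress is 0%" ... "completed successfully" entries above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # oslo.vmware raises a VimFaultException at this point
                # via exceptions.translate_fault(), as the traceback below shows.
                raise TaskError(info.get('error') or 'task failed')
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.1fs' % timeout)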
[ 1199.329674] env[67424]: DEBUG nova.compute.claims [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1199.329884] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.330138] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.332785] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-53037e78-898e-411a-82a6-9898e441cce3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.355075] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1199.412963] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1199.478125] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1199.478401] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1199.832107] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a82e6d5-190b-4203-a910-9c2fb833179d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.839924] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a814a65-be0e-4406-9aa7-f77092628852 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.868991] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41ea08c-acfc-490d-933e-d10ec7cc5f53 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.876151] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5284b967-b579-40e2-92af-c8e3826b5097 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.889976] env[67424]: DEBUG nova.compute.provider_tree [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.898870] env[67424]: DEBUG nova.scheduler.client.report [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1199.913231] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.583s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.913751] env[67424]: ERROR nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.913751] env[67424]: Faults: ['InvalidArgument'] [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Traceback (most recent call last): [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self.driver.spawn(context, instance, image_meta, [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self._fetch_image_if_missing(context, vi) [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] image_cache(vi, tmp_image_ds_loc) [ 1199.913751] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] vm_util.copy_virtual_disk( [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] session._wait_for_task(vmdk_copy_task) [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return self.wait_for_task(task_ref) [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return evt.wait() [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] result = hub.switch() [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] return self.greenlet.switch() [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1199.914093] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] self.f(*self.args, **self.kw) [ 1199.914417] env[67424]: ERROR nova.compute.manager [instance: 
de7549c2-328b-4ab2-b590-c32f8a7d3261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1199.914417] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] raise exceptions.translate_fault(task_info.error) [ 1199.914417] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.914417] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Faults: ['InvalidArgument'] [ 1199.914417] env[67424]: ERROR nova.compute.manager [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] [ 1199.914551] env[67424]: DEBUG nova.compute.utils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1199.915923] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Build of instance de7549c2-328b-4ab2-b590-c32f8a7d3261 was re-scheduled: A specified parameter was not correct: fileType [ 1199.915923] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1199.916300] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1199.916471] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1199.916639] env[67424]: DEBUG nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1199.916799] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1200.419645] env[67424]: DEBUG nova.network.neutron [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.431747] env[67424]: INFO nova.compute.manager [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Took 0.51 seconds to deallocate network for instance. [ 1200.548561] env[67424]: INFO nova.scheduler.client.report [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Deleted allocations for instance de7549c2-328b-4ab2-b590-c32f8a7d3261 [ 1200.577997] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a1d38220-ae9b-4b69-b958-7cf433a1aa73 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 525.990s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.579432] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 327.669s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.579669] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Acquiring lock "de7549c2-328b-4ab2-b590-c32f8a7d3261-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.579903] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock 
"de7549c2-328b-4ab2-b590-c32f8a7d3261-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.580147] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.583229] env[67424]: INFO nova.compute.manager [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Terminating instance [ 1200.586217] env[67424]: DEBUG nova.compute.manager [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1200.586217] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.586414] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adbc32d3-30ca-4741-a561-339ea91ce099 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.597155] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9576e6-1a03-4af3-aedc-1c4325ddb171 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.608716] env[67424]: DEBUG nova.compute.manager [None req-ec44372d-2931-49b4-8215-c9b1d2de0f8e tempest-ServersTestBootFromVolume-1003476590 tempest-ServersTestBootFromVolume-1003476590-project-member] [instance: 85b0863a-17ed-4cad-8086-abc6f5755225] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.632587] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de7549c2-328b-4ab2-b590-c32f8a7d3261 could not be found. 
[ 1200.632815] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1200.633009] env[67424]: INFO nova.compute.manager [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1200.633273] env[67424]: DEBUG oslo.service.loopingcall [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.633522] env[67424]: DEBUG nova.compute.manager [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1200.633618] env[67424]: DEBUG nova.network.neutron [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1200.642256] env[67424]: DEBUG nova.compute.manager [None req-ec44372d-2931-49b4-8215-c9b1d2de0f8e tempest-ServersTestBootFromVolume-1003476590 tempest-ServersTestBootFromVolume-1003476590-project-member] [instance: 85b0863a-17ed-4cad-8086-abc6f5755225] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.668266] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ec44372d-2931-49b4-8215-c9b1d2de0f8e tempest-ServersTestBootFromVolume-1003476590 tempest-ServersTestBootFromVolume-1003476590-project-member] Lock "85b0863a-17ed-4cad-8086-abc6f5755225" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.206s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.671447] env[67424]: DEBUG nova.network.neutron [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.678104] env[67424]: DEBUG nova.compute.manager [None req-9d4e6525-2952-4a92-b1ed-b925cdbffeba tempest-ServersNegativeTestMultiTenantJSON-500119815 tempest-ServersNegativeTestMultiTenantJSON-500119815-project-member] [instance: 8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.680831] env[67424]: INFO nova.compute.manager [-] [instance: de7549c2-328b-4ab2-b590-c32f8a7d3261] Took 0.05 seconds to deallocate network for instance. [ 1200.703070] env[67424]: DEBUG nova.compute.manager [None req-9d4e6525-2952-4a92-b1ed-b925cdbffeba tempest-ServersNegativeTestMultiTenantJSON-500119815 tempest-ServersNegativeTestMultiTenantJSON-500119815-project-member] [instance: 8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.733042] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9d4e6525-2952-4a92-b1ed-b925cdbffeba tempest-ServersNegativeTestMultiTenantJSON-500119815 tempest-ServersNegativeTestMultiTenantJSON-500119815-project-member] Lock "8abd9aa5-be09-4e73-9ccc-a726c5d3cc6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.170s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.745118] env[67424]: DEBUG nova.compute.manager [None req-e2010fdd-9f07-4e36-9942-2add29834856 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: a0c7d0b0-e26c-4cba-9a45-8e60486e3905] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.772212] env[67424]: DEBUG nova.compute.manager [None req-e2010fdd-9f07-4e36-9942-2add29834856 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] [instance: a0c7d0b0-e26c-4cba-9a45-8e60486e3905] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1200.793454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-6fe6042c-eff0-4ccc-85dd-c4fb495dae41 tempest-ServersV294TestFqdnHostnames-1763215016 tempest-ServersV294TestFqdnHostnames-1763215016-project-member] Lock "de7549c2-328b-4ab2-b590-c32f8a7d3261" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.800558] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e2010fdd-9f07-4e36-9942-2add29834856 tempest-MigrationsAdminTest-2110344523 tempest-MigrationsAdminTest-2110344523-project-member] Lock "a0c7d0b0-e26c-4cba-9a45-8e60486e3905" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.717s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.812168] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1200.873109] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.873429] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.874927] env[67424]: INFO nova.compute.claims [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.231215] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534d528c-13cd-4133-88e9-e61dc1c17012 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.239017] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff1ab22-b542-493d-b814-6f3f42c3112b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.268577] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e729253f-d03f-449b-8c41-1d269c367050 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.276123] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6520c9-98cc-43c8-ab3a-fde58e48aaa1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.289572] env[67424]: DEBUG nova.compute.provider_tree [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.300250] env[67424]: DEBUG nova.scheduler.client.report [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1201.315451] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.442s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.315914] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1201.357581] env[67424]: DEBUG nova.compute.utils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1201.359085] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Not allocating networking since 'none' was specified. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1201.367729] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1201.429515] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1201.451958] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1201.452230] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1201.452390] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.452575] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1201.452721] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.452871] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1201.453089] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1201.453253] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1201.453419] env[67424]: DEBUG nova.virt.hardware [None 
req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1201.453581] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1201.453753] env[67424]: DEBUG nova.virt.hardware [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1201.454999] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7921cc1-b177-4019-a0cd-379af7e7f232 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.462715] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39b0348-d4e6-4e3d-bd72-e839753c9180 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.476159] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance VIF info [] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1201.481625] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Creating folder: Project (a0c2bceb79e74ac487a31ccbd835b7a7). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1201.481915] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8120c6af-3998-4a66-bc39-f97949d43255 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.491081] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Created folder: Project (a0c2bceb79e74ac487a31ccbd835b7a7) in parent group-v639843. [ 1201.491890] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Creating folder: Instances. Parent ref: group-v639909. 
{{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1201.491890] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10ce339d-6d56-4b7b-91ea-ee16c4501238 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.499238] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Created folder: Instances in parent group-v639909. [ 1201.499360] env[67424]: DEBUG oslo.service.loopingcall [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.499502] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1201.499698] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0c1b2ad-c747-4b89-aaab-b4041cdecbb4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.515887] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1201.515887] env[67424]: value = "task-3200001" [ 1201.515887] env[67424]: _type = "Task" [ 1201.515887] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.523239] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200001, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.025412] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200001, 'name': CreateVM_Task, 'duration_secs': 0.252315} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.025615] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1202.026148] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.026364] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.026822] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1202.027147] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767ee1c5-3782-4b30-a221-7fc964965896 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.031971] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for the task: (returnval){ [ 1202.031971] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cc0d69-28dd-d97c-b2c4-91301fcf2622" [ 1202.031971] env[67424]: _type = "Task" [ 1202.031971] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.041329] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cc0d69-28dd-d97c-b2c4-91301fcf2622, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.542514] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.543125] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1202.543470] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.388874] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.389194] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1206.403144] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1207.388260] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.395762] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.396057] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1209.387865] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.387865] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1211.397984] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.383816] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.388517] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.388888] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.388518] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.382907] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.408784] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.421098] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.421861] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.421861] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.421861] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1215.422707] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d7d6b9-6ce6-4859-8274-dccf88b12b7a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.431460] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3ad7f0-b13f-4a5a-ac64-fd605227be70 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.445357] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f23f97a-e05c-4943-a8ac-9cc991c32616 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.451803] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7416a0-f2ba-4e58-be42-8f0ec8223ce0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.482113] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1215.482113] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.482113] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.654307] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 924956a0-9a91-4870-a240-6a1d7868904b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.654478] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.654613] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.654736] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.654853] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.654971] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.655106] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.655225] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.655337] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.655452] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.667142] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.677389] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4b3afdb2-16fd-453c-b831-7ad5a0a74772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.688233] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0e1ff97a-fcea-4a95-a9fb-d35797c914fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.697934] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2580169a-b80b-43fe-bd63-9a09723a691e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.707536] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.716721] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3525b872-dfbb-44d6-853d-8d0612cec3f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.726780] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.736842] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 9eeec7b3-1a9f-4783-a84e-970d5a85129e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.746166] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.755821] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 28e7dff8-b59f-4357-a5b7-48e713d59fac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.764776] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad72ba1-1f79-4a2c-b411-3e0f51ca342b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.773683] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 654172d5-94b4-427b-930d-7e8d1fa31d36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.782492] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance b12975c8-0bce-41b6-afae-f78d34a9309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.791552] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 784fdc06-dfeb-403b-b0b6-38399a4cf972 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.800938] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fa2a9d05-598d-41ab-9b74-a3b50d49777d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.810320] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2f42c7da-46a0-4ae2-9ac9-92527183814b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1215.810556] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.810689] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1216.141113] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c735bc87-8198-40f1-9aeb-fb5f5a592ec1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.148453] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ff94ea-0520-4e8c-8867-700d3cd2d862 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.177174] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab8a43e-fd3e-421c-8874-1f46df2edfa7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.183876] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152f7fa2-cfe9-47e7-9e3d-5308cdd690c5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.196937] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.210730] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1216.223441] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1216.223551] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.742s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.203472] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.388160] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1219.388528] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1219.388528] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1219.412248] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.412438] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.412550] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.412652] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.412772] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.412942] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.413132] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.413329] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.413459] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.413523] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1219.413595] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1234.702552] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.722432] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){ [ 1234.722432] env[67424]: value = "domain-c8" [ 1234.722432] env[67424]: _type = "ClusterComputeResource" [ 1234.722432] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1234.723751] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e368bda1-46c3-42b1-88bb-9b5b5ba1a66d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.741420] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 10 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1234.741593] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 924956a0-9a91-4870-a240-6a1d7868904b {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.741794] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.741953] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742119] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid a7d131b6-3584-48c3-acce-d553c145a837 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742294] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 850df4c3-3a92-47d3-973d-62f41d813f6c {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742454] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 2489aa3d-1973-4ede-9cae-dab971fa4a7c {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742602] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 3e4e39f2-9267-4076-a302-d5210cb3d5ff {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742748] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 4e370013-5dfb-467c-8709-c0a0b256a9aa {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.742932] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid aa454838-2a3f-40a0-825d-1b3f2656a6a5 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 
1234.743047] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1234.743407] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "924956a0-9a91-4870-a240-6a1d7868904b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.743649] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.743833] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.744124] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "a7d131b6-3584-48c3-acce-d553c145a837" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.744339] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "850df4c3-3a92-47d3-973d-62f41d813f6c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.745032] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.745032] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.745032] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.745206] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.745334] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.218894] env[67424]: WARNING oslo_vmware.rw_handles [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.218894] env[67424]: ERROR oslo_vmware.rw_handles [ 1248.219522] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1248.221048] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1248.221295] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Copying Virtual Disk [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/f7a54c26-d0cd-4cc4-90fb-47dc230ce63f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1248.221580] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ca4dc20-0ebc-44f5-93ef-edc003e9fb0e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.229749] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for the task: (returnval){ [ 1248.229749] env[67424]: value = "task-3200002" [ 1248.229749] env[67424]: _type = "Task" [ 1248.229749] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.237683] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Task: {'id': task-3200002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.741143] env[67424]: DEBUG oslo_vmware.exceptions [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1248.741143] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.741598] env[67424]: ERROR nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.741598] env[67424]: Faults: ['InvalidArgument'] [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Traceback (most recent call last): [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] yield resources [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self.driver.spawn(context, instance, image_meta, [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1248.741598] env[67424]: ERROR 
nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self._fetch_image_if_missing(context, vi) [ 1248.741598] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] image_cache(vi, tmp_image_ds_loc) [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] vm_util.copy_virtual_disk( [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] session._wait_for_task(vmdk_copy_task) [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return self.wait_for_task(task_ref) [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return evt.wait() [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] result = hub.switch() [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1248.742228] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return self.greenlet.switch() [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self.f(*self.args, **self.kw) [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] raise exceptions.translate_fault(task_info.error) [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 
924956a0-9a91-4870-a240-6a1d7868904b] Faults: ['InvalidArgument'] [ 1248.742838] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] [ 1248.742838] env[67424]: INFO nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Terminating instance [ 1248.743493] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.743699] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.743937] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e8676de-4385-4a3f-99ae-11d608a7ef3a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.747426] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1248.747648] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1248.748377] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7affc59-35bd-42aa-bbff-1744deffc5cd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.751970] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.752165] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1248.752838] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c89fe70-8034-432d-a197-bf093a4a7840 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.756780] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1248.757284] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eead490a-a2f0-4264-87fe-4817a0bec509 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.759499] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for the task: (returnval){ [ 1248.759499] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52588572-64b7-23a6-f7fe-4f0115969587" [ 1248.759499] env[67424]: _type = "Task" [ 1248.759499] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.768016] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52588572-64b7-23a6-f7fe-4f0115969587, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.819370] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1248.819618] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1248.819788] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Deleting the datastore file [datastore2] 924956a0-9a91-4870-a240-6a1d7868904b {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.820051] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1b6ff08-b041-45a1-8dea-db0e57476456 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.825476] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for the task: (returnval){ [ 1248.825476] env[67424]: value = "task-3200004" [ 1248.825476] env[67424]: _type = "Task" [ 1248.825476] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.834637] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Task: {'id': task-3200004, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.270704] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1249.271014] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Creating directory with path [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.271014] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91f53b23-7a69-4bcb-99d5-3dbe834be973 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.283849] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Created directory with path [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.283849] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Fetch image to [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1249.283849] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1249.283849] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07de2ac7-b568-4f37-8514-1e1c649f8dd9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.290157] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e688d6-634f-43c5-8c8f-9013e982c653 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.299146] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555bed2a-6b54-4d86-a022-bcde7ce544d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.332777] env[67424]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c472ffad-83af-4475-9051-5625829621e5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.339906] env[67424]: DEBUG oslo_vmware.api [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Task: {'id': task-3200004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072976} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.341353] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.341548] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1249.341724] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1249.341902] env[67424]: INFO nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1249.343729] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-14c406b0-1cc0-4497-935c-b1518694fb5f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.345988] env[67424]: DEBUG nova.compute.claims [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1249.346178] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.346388] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.369141] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1249.423592] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1249.484244] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1249.485085] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1249.729161] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc1e605-5eb2-40fb-a20a-460a7dfb3360 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.738029] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd87327-166a-4ba7-8faf-64550e4a9de8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.767366] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbea8139-975b-45a5-8f51-a06c414590ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.774561] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c36222-e3c7-4c9a-ac2f-c96d95232257 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.787167] env[67424]: DEBUG nova.compute.provider_tree [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.796042] env[67424]: DEBUG nova.scheduler.client.report [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1249.809290] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.463s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.809815] env[67424]: ERROR nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.809815] env[67424]: Faults: ['InvalidArgument'] [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Traceback (most recent call last): [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1249.809815] env[67424]: 
ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self.driver.spawn(context, instance, image_meta, [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self._fetch_image_if_missing(context, vi) [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] image_cache(vi, tmp_image_ds_loc) [ 1249.809815] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] vm_util.copy_virtual_disk( [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] session._wait_for_task(vmdk_copy_task) [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return self.wait_for_task(task_ref) [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return evt.wait() [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] result = hub.switch() [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] return self.greenlet.switch() [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1249.810332] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] self.f(*self.args, **self.kw) [ 1249.810719] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1249.810719] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] raise exceptions.translate_fault(task_info.error) [ 1249.810719] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.810719] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Faults: ['InvalidArgument'] [ 1249.810719] env[67424]: ERROR nova.compute.manager [instance: 924956a0-9a91-4870-a240-6a1d7868904b] [ 1249.810719] env[67424]: DEBUG nova.compute.utils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1249.811824] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Build of instance 924956a0-9a91-4870-a240-6a1d7868904b was re-scheduled: A specified parameter was not correct: fileType [ 1249.811824] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1249.812199] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1249.812373] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1249.812546] env[67424]: DEBUG nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1249.812709] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.131889] env[67424]: DEBUG nova.network.neutron [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.147544] env[67424]: INFO nova.compute.manager [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Took 0.33 seconds to deallocate network for instance. [ 1250.255522] env[67424]: INFO nova.scheduler.client.report [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Deleted allocations for instance 924956a0-9a91-4870-a240-6a1d7868904b [ 1250.279819] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3dd2888c-e75e-4e2c-a62b-12acf00cde06 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 567.393s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.281017] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 368.683s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.281243] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Acquiring lock "924956a0-9a91-4870-a240-6a1d7868904b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.281450] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.281621] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.286529] env[67424]: INFO nova.compute.manager [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Terminating instance [ 1250.288807] env[67424]: DEBUG nova.compute.manager [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1250.289591] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1250.289591] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-499b6283-632f-4ee8-9d50-44683a3d8cbe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.295159] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1250.301432] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d804453-bd91-4198-9ac0-92e215ac3fbd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.342214] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 924956a0-9a91-4870-a240-6a1d7868904b could not be found. [ 1250.342577] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1250.342758] env[67424]: INFO nova.compute.manager [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Took 0.05 seconds to destroy the instance on the hypervisor. 
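The lock bookkeeping in the records above (the per-instance lock "924956a0-9a91-4870-a240-6a1d7868904b" held for 567.393s by build_and_run_instance while do_terminate_instance waited 368.683s on it) is produced by oslo.concurrency. A minimal sketch of that pattern, using the real lockutils.lock() context manager but with an illustrative lock body rather than Nova's actual terminate path:

    from oslo_concurrency import lockutils

    def do_terminate_instance(instance_uuid):
        # Entry blocks while another thread holds the same named lock; the
        # "waited 368.683s" figure above is exactly that wait, as measured
        # and logged by lockutils itself.
        with lockutils.lock(instance_uuid):
            print("destroying %s on the hypervisor" % instance_uuid)

    do_terminate_instance("924956a0-9a91-4870-a240-6a1d7868904b")

Each "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplet in this trace corresponds to one entry and exit of such a context manager or its @lockutils.synchronized decorator form.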
[ 1250.343035] env[67424]: DEBUG oslo.service.loopingcall [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.346070] env[67424]: DEBUG nova.compute.manager [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1250.346168] env[67424]: DEBUG nova.network.neutron [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.370110] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.370339] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.372027] env[67424]: INFO nova.compute.claims [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1250.382205] env[67424]: DEBUG nova.network.neutron [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.395671] env[67424]: INFO nova.compute.manager [-] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] Took 0.05 seconds to deallocate network for instance. [ 1250.501656] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a0360c84-30f0-419a-b6d8-7b454a6ec1ec tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "924956a0-9a91-4870-a240-6a1d7868904b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.220s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.502981] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "924956a0-9a91-4870-a240-6a1d7868904b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.760s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.503198] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 924956a0-9a91-4870-a240-6a1d7868904b] During sync_power_state the instance has a pending task (deleting). Skip. 
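The "Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f" records above carry the full placement inventory for this compute node. Placement effectively treats (total - reserved) * allocation_ratio as the usable capacity per resource class, which is why a 48-vCPU host can back many more than 48 claimed VCPUs here. A quick arithmetic check against the logged figures (dict values copied from the log; min_unit/max_unit/step_size omitted since they bound per-request sizes, not totals):

    # Inventory as reported above for provider b21acede-6243-4c82-934a-a3956380220f.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0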
[ 1250.503369] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "924956a0-9a91-4870-a240-6a1d7868904b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.731838] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece2f69b-0e9d-46ba-b71a-4863771f5cb9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.739249] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d957262f-2c50-4107-8366-c0a007df78f5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.768223] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4077fae-402d-46c2-81a9-bf54b80524ab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.775074] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d0dcf1-ab8a-4cc2-b0ef-39db93e521ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.787942] env[67424]: DEBUG nova.compute.provider_tree [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.796065] env[67424]: DEBUG nova.scheduler.client.report [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1250.809714] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.439s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.810186] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1250.841948] env[67424]: DEBUG nova.compute.utils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1250.843631] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1250.843776] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1250.852922] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1250.914351] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1250.942431] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1250.942967] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1250.942967] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.943161] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1250.943161] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.943544] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1250.943544] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1250.943765] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1250.943803] env[67424]: DEBUG 
nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1250.943968] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1250.944153] env[67424]: DEBUG nova.virt.hardware [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1250.945023] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32ed942-b1f0-4537-be91-07bcaeb188ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.953285] env[67424]: DEBUG nova.policy [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66293d02fafb4d29a9f66c200ddb4624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fcf3dd62a69439a8bba32c3be865075', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1250.955595] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a77e4d-f834-41d9-941d-f3c621fb02f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.266460] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Successfully created port: 16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.413097] env[67424]: DEBUG nova.compute.manager [req-6a0b18fa-67be-434c-8287-1709de199f23 req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Received event network-vif-plugged-16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1252.413335] env[67424]: DEBUG oslo_concurrency.lockutils [req-6a0b18fa-67be-434c-8287-1709de199f23 req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] Acquiring lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.413533] env[67424]: DEBUG oslo_concurrency.lockutils [req-6a0b18fa-67be-434c-8287-1709de199f23 
req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.413710] env[67424]: DEBUG oslo_concurrency.lockutils [req-6a0b18fa-67be-434c-8287-1709de199f23 req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.413871] env[67424]: DEBUG nova.compute.manager [req-6a0b18fa-67be-434c-8287-1709de199f23 req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] No waiting events found dispatching network-vif-plugged-16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.414233] env[67424]: WARNING nova.compute.manager [req-6a0b18fa-67be-434c-8287-1709de199f23 req-634135d1-319d-4528-8804-05a5f94ca0cf service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Received unexpected event network-vif-plugged-16773d10-05e5-43b7-b9eb-64346d364338 for instance with vm_state building and task_state spawning. [ 1252.484380] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Successfully updated port: 16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.497949] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.498143] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.498315] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1252.539483] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1252.702213] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Updating instance_info_cache with network_info: [{"id": "16773d10-05e5-43b7-b9eb-64346d364338", "address": "fa:16:3e:c2:cb:85", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16773d10-05", "ovs_interfaceid": "16773d10-05e5-43b7-b9eb-64346d364338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.716877] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.717217] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance network_info: |[{"id": "16773d10-05e5-43b7-b9eb-64346d364338", "address": "fa:16:3e:c2:cb:85", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16773d10-05", "ovs_interfaceid": "16773d10-05e5-43b7-b9eb-64346d364338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1252.717652] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:cb:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16773d10-05e5-43b7-b9eb-64346d364338', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1252.726079] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating folder: Project (2fcf3dd62a69439a8bba32c3be865075). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1252.726704] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1379b15-f5fa-4252-93ea-1af37e0b9b89 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.739839] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created folder: Project (2fcf3dd62a69439a8bba32c3be865075) in parent group-v639843. [ 1252.740047] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating folder: Instances. Parent ref: group-v639912. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1252.740261] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74a4f613-2192-459a-b991-95abfffe44e9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.749186] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created folder: Instances in parent group-v639912. [ 1252.749417] env[67424]: DEBUG oslo.service.loopingcall [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1252.749617] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1252.749812] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79c39072-adac-488c-a37d-e1fecd10a2bd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.768227] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.768227] env[67424]: value = "task-3200007" [ 1252.768227] env[67424]: _type = "Task" [ 1252.768227] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.775875] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200007, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.279041] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200007, 'name': CreateVM_Task, 'duration_secs': 0.305091} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.279183] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1253.279869] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.280050] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.280364] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1253.280609] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41606931-3cd7-4cd2-88ea-9e7fbfc93f94 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.284884] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 1253.284884] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521d3733-9c21-143f-1d86-5895ea44b35d" [ 1253.284884] env[67424]: _type = "Task" [ 1253.284884] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.292012] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521d3733-9c21-143f-1d86-5895ea44b35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.796573] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.796928] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.797098] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.438779] env[67424]: DEBUG nova.compute.manager [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Received event network-changed-16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1254.439012] env[67424]: DEBUG nova.compute.manager [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Refreshing instance network info cache due to event network-changed-16773d10-05e5-43b7-b9eb-64346d364338. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1254.439237] env[67424]: DEBUG oslo_concurrency.lockutils [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] Acquiring lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.439383] env[67424]: DEBUG oslo_concurrency.lockutils [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] Acquired lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.439580] env[67424]: DEBUG nova.network.neutron [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Refreshing network info cache for port 16773d10-05e5-43b7-b9eb-64346d364338 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1254.820068] env[67424]: DEBUG nova.network.neutron [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Updated VIF entry in instance network info cache for port 16773d10-05e5-43b7-b9eb-64346d364338. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1254.820502] env[67424]: DEBUG nova.network.neutron [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Updating instance_info_cache with network_info: [{"id": "16773d10-05e5-43b7-b9eb-64346d364338", "address": "fa:16:3e:c2:cb:85", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16773d10-05", "ovs_interfaceid": "16773d10-05e5-43b7-b9eb-64346d364338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.830415] env[67424]: DEBUG oslo_concurrency.lockutils [req-bbd3cb0f-ed1a-48da-8de3-072df043c378 req-36b0dfdc-07a0-4e0a-9124-ba5b0ba13040 service nova] Releasing lock "refresh_cache-15544bb1-1353-4b19-ac1e-967f2e43713e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.671280] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] 
Acquiring lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1264.751363] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "15544bb1-1353-4b19-ac1e-967f2e43713e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.387861] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.388192] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1271.388869] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.384019] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.388321] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.387527] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.388417] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.387771] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.387963] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.401831] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
{{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.402069] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.402241] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.402397] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1277.403538] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd14ec8-78a9-4e1d-af4f-2c5a8538824a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.412720] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0332a4-8642-414a-a321-5847eb6609b1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.428491] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b86962-0e50-4734-b454-385017795eeb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.435350] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdfde1c-0ba4-4843-8ffb-2bf23789d530 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.464371] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181004MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1277.465602] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.465602] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.565549] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 
1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.565705] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.565836] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.565960] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566105] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566228] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566347] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566675] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566675] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.566675] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1277.578845] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.596734] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 9eeec7b3-1a9f-4783-a84e-970d5a85129e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.610273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.624739] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 28e7dff8-b59f-4357-a5b7-48e713d59fac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.638239] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2ad72ba1-1f79-4a2c-b411-3e0f51ca342b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.649226] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 654172d5-94b4-427b-930d-7e8d1fa31d36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.659341] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance b12975c8-0bce-41b6-afae-f78d34a9309f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.670374] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 784fdc06-dfeb-403b-b0b6-38399a4cf972 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.681896] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fa2a9d05-598d-41ab-9b74-a3b50d49777d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.692444] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2f42c7da-46a0-4ae2-9ac9-92527183814b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1277.692840] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1277.692840] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1277.977980] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f7cb34-e37e-4a39-b055-1983e9200fe5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.989947] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee223f3-8064-4f64-b288-81a2e5910fd1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.028032] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5f36b0-6b5c-4645-ad28-dd4ee1bb4b71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.036248] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03011e9e-c517-443e-a2c3-57a4b3b96d6e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.050703] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.062152] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1278.077408] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1278.077691] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.613s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.588800] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.589730] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.077569] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.077765] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1280.077879] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1280.099300] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.099435] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.099550] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.099633] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.099758] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.099910] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.100048] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.100114] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.100439] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.100439] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1280.100439] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1288.003831] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.004178] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.406844] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.407257] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.034675] env[67424]: WARNING 
oslo_vmware.rw_handles [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1296.034675] env[67424]: ERROR oslo_vmware.rw_handles [ 1296.035281] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1296.036944] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1296.040273] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Copying Virtual Disk [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/72c01e7c-5204-4601-8f78-1f982fbf136e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1296.041128] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeaa43fe-ea8a-408c-986d-07d6ba37c78f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.051197] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for the task: (returnval){ [ 1296.051197] env[67424]: value = 
"task-3200008" [ 1296.051197] env[67424]: _type = "Task" [ 1296.051197] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.061418] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Task: {'id': task-3200008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.563084] env[67424]: DEBUG oslo_vmware.exceptions [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1296.563454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.564028] env[67424]: ERROR nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.564028] env[67424]: Faults: ['InvalidArgument'] [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Traceback (most recent call last): [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] yield resources [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self.driver.spawn(context, instance, image_meta, [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self._fetch_image_if_missing(context, vi) [ 1296.564028] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 
2b6fd570-3691-4d29-8351-6c0d2fdb8e01] image_cache(vi, tmp_image_ds_loc) [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] vm_util.copy_virtual_disk( [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] session._wait_for_task(vmdk_copy_task) [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return self.wait_for_task(task_ref) [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return evt.wait() [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] result = hub.switch() [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1296.564388] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return self.greenlet.switch() [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self.f(*self.args, **self.kw) [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] raise exceptions.translate_fault(task_info.error) [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Faults: ['InvalidArgument'] [ 1296.564743] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] [ 1296.564743] env[67424]: INFO nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Terminating instance [ 1296.566012] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 
tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.566267] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.567088] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1296.567772] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1296.568131] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc9c196e-7318-434f-a0dd-36548cfda94c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.570686] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30dce78f-eb72-4588-9690-5f861f997507 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.577817] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1296.578076] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ecf915e-e7f8-4ff5-aca0-0a088c25dcef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.580442] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.580624] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1296.581628] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040f197f-2e32-45e0-92f7-ed85b24a5674 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.586483] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for the task: (returnval){ [ 1296.586483] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52a0054c-12a3-8f00-28d8-d424d00fa48e" [ 1296.586483] env[67424]: _type = "Task" [ 1296.586483] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.598825] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52a0054c-12a3-8f00-28d8-d424d00fa48e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.677168] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1296.677471] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1296.677875] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Deleting the datastore file [datastore2] 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.678209] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97499630-2412-496f-b015-6e74d8d09654 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.686639] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for the task: (returnval){ [ 1296.686639] env[67424]: value = "task-3200010" [ 1296.686639] env[67424]: _type = "Task" [ 1296.686639] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.697878] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Task: {'id': task-3200010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.101046] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1297.101348] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Creating directory with path [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.101571] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a403f1a1-d465-4fa0-8359-41bb4302f435 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.116875] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Created directory with path [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.119831] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Fetch image to [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1297.119831] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1297.119831] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5301736e-69da-44fe-bd7e-8a51588bf242 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.124965] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919e4594-9ca7-4d64-a606-82720189c696 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.134315] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad02a3d0-5874-46ab-a42b-c51859c802c1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.169727] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f32b68-6dad-4d3b-ba33-2796f5b9ac50 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.177148] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0164c0a-204d-4f68-bb9f-6255cb1ba856 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.196146] env[67424]: DEBUG oslo_vmware.api [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Task: {'id': task-3200010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070254} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.196413] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1297.196597] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1297.196769] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1297.196946] env[67424]: INFO nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Took 0.63 seconds to destroy the instance on the hypervisor. 
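
For orientation, the Task: {'id': task-3200010, ...} records above are produced by oslo.vmware's task-polling loop (_poll_task in oslo_vmware/api.py): the client invokes a vCenter method, receives a task reference, and polls its state until it is terminal. A minimal sketch of that loop, assuming a hypothetical get_task_info callable standing in for the PropertyCollector read (not the library's actual code):

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state."""
        while True:
            info = get_task_info()  # one property-collector round-trip per poll
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # oslo.vmware raises a translated fault at this point, e.g. the
                # VimFaultException ("InvalidArgument: fileType") seen earlier.
                raise TaskFailed(info.get('error'))
            time.sleep(poll_interval)

The "progress is 0%." lines are emitted from inside this loop before the task completes; the "completed successfully ... duration_secs" record corresponds to the success branch.
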
[ 1297.201648] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1297.203231] env[67424]: DEBUG nova.compute.claims [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1297.203231] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.203404] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.454909] env[67424]: DEBUG oslo_vmware.rw_handles [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1297.531228] env[67424]: DEBUG oslo_vmware.rw_handles [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1297.531420] env[67424]: DEBUG oslo_vmware.rw_handles [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1297.618504] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54917a8d-7ad4-48c9-80ab-da5e57c56994 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.626009] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3194773-c85e-4337-8e03-70988aa70582 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.662399] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29d90b4-d596-4357-8d0d-dcd5db6363f3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.671066] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a74aaf-9781-4ed6-9311-621fabde105b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.685454] env[67424]: DEBUG nova.compute.provider_tree [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.697876] env[67424]: DEBUG nova.scheduler.client.report [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1297.722023] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.518s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.724018] env[67424]: ERROR nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.724018] env[67424]: Faults: ['InvalidArgument'] [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Traceback (most recent call last): [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self.driver.spawn(context, instance, image_meta, [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self._fetch_image_if_missing(context, vi) [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] image_cache(vi, tmp_image_ds_loc) [ 1297.724018] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] vm_util.copy_virtual_disk( [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] session._wait_for_task(vmdk_copy_task) [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return self.wait_for_task(task_ref) [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return evt.wait() [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] result = hub.switch() [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] return self.greenlet.switch() [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1297.724474] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] self.f(*self.args, **self.kw) [ 1297.724885] env[67424]: ERROR nova.compute.manager [instance: 
2b6fd570-3691-4d29-8351-6c0d2fdb8e01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1297.724885] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] raise exceptions.translate_fault(task_info.error) [ 1297.724885] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.724885] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Faults: ['InvalidArgument'] [ 1297.724885] env[67424]: ERROR nova.compute.manager [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] [ 1297.724885] env[67424]: DEBUG nova.compute.utils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1297.729020] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Build of instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 was re-scheduled: A specified parameter was not correct: fileType [ 1297.729020] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1297.729020] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1297.729020] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1297.729020] env[67424]: DEBUG nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1297.729259] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1298.436859] env[67424]: DEBUG nova.network.neutron [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.454337] env[67424]: INFO nova.compute.manager [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Took 0.73 seconds to deallocate network for instance. [ 1298.585536] env[67424]: INFO nova.scheduler.client.report [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Deleted allocations for instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 [ 1298.624426] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d48f1294-0bd5-47a7-85cd-3aace7b823f2 tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 612.538s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.626024] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 412.642s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.626259] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Acquiring lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.626538] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock 
"2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.626654] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.633060] env[67424]: INFO nova.compute.manager [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Terminating instance [ 1298.638027] env[67424]: DEBUG nova.compute.manager [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1298.638027] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1298.638027] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32db694c-68ce-41c6-b5da-2584c0b1dc53 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.646952] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0ec623-b0fc-4c96-81a3-6c94014eb3cb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.661917] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 4b3afdb2-16fd-453c-b831-7ad5a0a74772] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1298.684844] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b6fd570-3691-4d29-8351-6c0d2fdb8e01 could not be found. 
[ 1298.686871] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1298.687151] env[67424]: INFO nova.compute.manager [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1298.687429] env[67424]: DEBUG oslo.service.loopingcall [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1298.687692] env[67424]: DEBUG nova.compute.manager [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1298.687833] env[67424]: DEBUG nova.network.neutron [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1298.701761] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 4b3afdb2-16fd-453c-b831-7ad5a0a74772] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1298.726298] env[67424]: DEBUG nova.network.neutron [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.742864] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "4b3afdb2-16fd-453c-b831-7ad5a0a74772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.360s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.744640] env[67424]: INFO nova.compute.manager [-] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] Took 0.06 seconds to deallocate network for instance. [ 1298.755898] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 0e1ff97a-fcea-4a95-a9fb-d35797c914fb] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1298.785413] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 0e1ff97a-fcea-4a95-a9fb-d35797c914fb] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1298.818476] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "0e1ff97a-fcea-4a95-a9fb-d35797c914fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.272s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.834240] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 2580169a-b80b-43fe-bd63-9a09723a691e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1298.855854] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.856042] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.876485] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 2580169a-b80b-43fe-bd63-9a09723a691e] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1298.896377] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9f37e33e-2e73-4293-9955-9d0df37c283a tempest-VolumesAssistedSnapshotsTest-1367036061 tempest-VolumesAssistedSnapshotsTest-1367036061-project-member] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.270s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.897589] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 64.154s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.899942] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2b6fd570-3691-4d29-8351-6c0d2fdb8e01] During sync_power_state the instance has a pending task (deleting). Skip. 
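The InstanceNotFound WARNING above is benign: the earlier records show the build for 2b6fd570-... was already being cleaned up, so no VM ever existed on the vCenter backend. When do_terminate_instance reaches the driver, the UUID lookup raises nova.exception.InstanceNotFound and vmops treats "already gone" as a successful destroy ("Instance destroyed ... Took 0.05 seconds"); the power-state sync likewise skips the instance because a deleting task is pending. A minimal sketch of that tolerance pattern follows; the helper callables are hypothetical stand-ins, and only the exception name matches nova.exception.

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, find_vm_ref, destroy_vm):
        """Destroy the backend VM, tolerating one that no longer exists."""
        try:
            vm_ref = find_vm_ref(instance_uuid)  # e.g. a SearchIndex.FindAllByUuid lookup
            destroy_vm(vm_ref)
        except InstanceNotFound:
            # Mirrors the log: "Instance does not exist on backend" is a
            # WARNING, not an error, and the destroy still completes.
            print("WARNING: instance %s does not exist on backend" % instance_uuid)
        print("instance %s destroyed" % instance_uuid)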
[ 1298.900569] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "2b6fd570-3691-4d29-8351-6c0d2fdb8e01" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.003s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.909860] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "2580169a-b80b-43fe-bd63-9a09723a691e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.333s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.923410] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1298.948682] env[67424]: DEBUG nova.compute.manager [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] [instance: 5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1298.978225] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c6269003-2432-49ae-bc69-effef45beb51 tempest-ListServersNegativeTestJSON-1563326931 tempest-ListServersNegativeTestJSON-1563326931-project-member] Lock "5aa1dae2-9ad3-41f1-b76f-b9dadb5c2c75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.370s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.994837] env[67424]: DEBUG nova.compute.manager [None req-22094edd-9a40-4511-ac5e-cadf8f48d499 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 3525b872-dfbb-44d6-853d-8d0612cec3f6] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1299.023420] env[67424]: DEBUG nova.compute.manager [None req-22094edd-9a40-4511-ac5e-cadf8f48d499 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] [instance: 3525b872-dfbb-44d6-853d-8d0612cec3f6] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1299.046286] env[67424]: DEBUG oslo_concurrency.lockutils [None req-22094edd-9a40-4511-ac5e-cadf8f48d499 tempest-SecurityGroupsTestJSON-1873081317 tempest-SecurityGroupsTestJSON-1873081317-project-member] Lock "3525b872-dfbb-44d6-853d-8d0612cec3f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.385s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.057232] env[67424]: DEBUG nova.compute.manager [None req-d91e02eb-096a-40cf-8049-25b24e55c021 tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1299.085640] env[67424]: DEBUG nova.compute.manager [None req-d91e02eb-096a-40cf-8049-25b24e55c021 tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: 2ad24d5c-afc8-435e-a9b7-3b25a7ffd587] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1299.110617] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d91e02eb-096a-40cf-8049-25b24e55c021 tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "2ad24d5c-afc8-435e-a9b7-3b25a7ffd587" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.153s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.126244] env[67424]: DEBUG nova.compute.manager [None req-4d984c4e-5f04-4592-bd6b-5c4f1117ef50 tempest-ServersAdminNegativeTestJSON-646082912 tempest-ServersAdminNegativeTestJSON-646082912-project-member] [instance: 9eeec7b3-1a9f-4783-a84e-970d5a85129e] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1299.151590] env[67424]: DEBUG nova.compute.manager [None req-4d984c4e-5f04-4592-bd6b-5c4f1117ef50 tempest-ServersAdminNegativeTestJSON-646082912 tempest-ServersAdminNegativeTestJSON-646082912-project-member] [instance: 9eeec7b3-1a9f-4783-a84e-970d5a85129e] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1299.181555] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4d984c4e-5f04-4592-bd6b-5c4f1117ef50 tempest-ServersAdminNegativeTestJSON-646082912 tempest-ServersAdminNegativeTestJSON-646082912-project-member] Lock "9eeec7b3-1a9f-4783-a84e-970d5a85129e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.238s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.193837] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1299.261267] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.261533] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.264259] env[67424]: INFO nova.compute.claims [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.667475] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7577f9-924a-4669-96a5-07580eea6fc9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.677712] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5a605f-ce52-45c7-80ff-229d5d36c24e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.712324] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f66ae1-4fd8-4549-91ff-93a15815cbbc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.718906] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5d639e-67bb-47e3-8c2a-12a1c919e27c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.732800] env[67424]: DEBUG nova.compute.provider_tree [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.744147] env[67424]: DEBUG nova.scheduler.client.report [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1299.766018] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.766018] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1299.810119] env[67424]: DEBUG nova.compute.utils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1299.814848] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1299.814848] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1299.825889] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1299.911422] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1299.918948] env[67424]: DEBUG nova.policy [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c51b8467e9a4dd7b7259edacd7f0fca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '32577de731d749ffb2939075f98687dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1299.945122] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1299.945382] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1299.945540] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1299.945722] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1299.945870] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1299.946104] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1299.946252] env[67424]: DEBUG 
nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1299.946414] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1299.946586] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1299.946751] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1299.946930] env[67424]: DEBUG nova.virt.hardware [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1299.948255] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7d3407-f61e-4de7-9e0a-0e578478bdb8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.957017] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7629cb6-2e25-4275-8b79-78d096dbd502 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.591209] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Successfully created port: 67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.624624] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a16a39c7-777e-4325-bcb9-1faa7fdc0f7d tempest-ServerActionsTestOtherA-1713597554 tempest-ServerActionsTestOtherA-1713597554-project-member] Acquiring lock "0894ecdf-ae55-4d68-b7e4-35c3e3eeb789" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.624946] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a16a39c7-777e-4325-bcb9-1faa7fdc0f7d tempest-ServerActionsTestOtherA-1713597554 tempest-ServerActionsTestOtherA-1713597554-project-member] Lock "0894ecdf-ae55-4d68-b7e4-35c3e3eeb789" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.313832] env[67424]: DEBUG nova.compute.manager [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Received event network-vif-plugged-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1302.314079] env[67424]: DEBUG oslo_concurrency.lockutils [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] Acquiring lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.314300] env[67424]: DEBUG oslo_concurrency.lockutils [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.314470] env[67424]: DEBUG oslo_concurrency.lockutils [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.314668] env[67424]: DEBUG nova.compute.manager [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] No waiting events found dispatching network-vif-plugged-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1302.314877] env[67424]: WARNING nova.compute.manager [req-c926ad8e-7a6d-400e-949b-3fb51d5a15b6 req-a2e4b542-b09e-48fe-ab32-ea4d0805aa00 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Received unexpected event network-vif-plugged-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 for instance with vm_state building and task_state spawning. 
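The "unexpected event" WARNING above is a race, not a failure: Neutron reported network-vif-plugged-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 before the compute manager had registered a waiter for it (the instance was still in vm_state building / task_state spawning), so pop_instance_event found nothing to dispatch and the event was dropped. A rough sketch of the pop-event idea, given as an illustration under that assumption rather than as Nova's actual implementation:

    import threading

    # Waiters register per (instance_uuid, event_name); an incoming external
    # event either wakes the registered waiter or is reported as unexpected.
    _waiters = {}
    _mutex = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        with _mutex:
            _waiters[(instance_uuid, event_name)] = ev
        return ev  # the caller later blocks on ev.wait(timeout=...)

    def external_instance_event(instance_uuid, event_name):
        with _mutex:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
        else:
            ev.set()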
[ 1302.400395] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Successfully updated port: 67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1302.410554] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.410554] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.411139] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1302.491558] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1302.822983] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Updating instance_info_cache with network_info: [{"id": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "address": "fa:16:3e:82:33:36", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67883e1e-2c", "ovs_interfaceid": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.840631] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.840960] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance network_info: |[{"id": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "address": "fa:16:3e:82:33:36", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67883e1e-2c", "ovs_interfaceid": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1302.841393] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:33:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1302.855823] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating folder: Project (32577de731d749ffb2939075f98687dc). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1302.858755] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be8bd3af-ff69-48da-b64a-7798ff00032f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.872540] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created folder: Project (32577de731d749ffb2939075f98687dc) in parent group-v639843. [ 1302.872752] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating folder: Instances. Parent ref: group-v639915. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1302.872965] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1616507b-91f4-41f1-b30d-dfef310e3265 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.882941] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created folder: Instances in parent group-v639915. [ 1302.883227] env[67424]: DEBUG oslo.service.loopingcall [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1302.883510] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1302.883726] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de0b8d5e-751a-49bc-bf42-6f5f772557de {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.904331] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1302.904331] env[67424]: value = "task-3200013" [ 1302.904331] env[67424]: _type = "Task" [ 1302.904331] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.912283] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200013, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.414198] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200013, 'name': CreateVM_Task, 'duration_secs': 0.342336} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.414371] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1303.432855] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.433063] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.433401] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1303.433664] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ff52399-37bc-4350-8e5d-3617bea314b3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.438598] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1303.438598] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5201bad2-0901-7f1a-c283-96822cfb8b07" [ 1303.438598] env[67424]: _type = "Task" [ 1303.438598] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.446857] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5201bad2-0901-7f1a-c283-96822cfb8b07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.948718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.949190] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1303.949190] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.219061] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.371509] env[67424]: DEBUG nova.compute.manager [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Received event network-changed-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1304.371751] env[67424]: DEBUG nova.compute.manager [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Refreshing instance network info cache due to event network-changed-67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1304.371943] env[67424]: DEBUG oslo_concurrency.lockutils [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] Acquiring lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.372101] env[67424]: DEBUG oslo_concurrency.lockutils [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] Acquired lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.372260] env[67424]: DEBUG nova.network.neutron [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Refreshing network info cache for port 67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1304.828236] env[67424]: DEBUG nova.network.neutron [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Updated VIF entry in instance network info cache for port 67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1304.828667] env[67424]: DEBUG nova.network.neutron [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Updating instance_info_cache with network_info: [{"id": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "address": "fa:16:3e:82:33:36", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67883e1e-2c", "ovs_interfaceid": "67883e1e-2cb5-4794-9fe5-d9d56bf9d5a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.850123] env[67424]: DEBUG oslo_concurrency.lockutils [req-5262669d-7043-4a6e-b3f6-5cc2f02d9be9 req-2542f7cd-194d-46c1-9cef-2385c2f76b65 service nova] Releasing lock "refresh_cache-4c3e649d-52e8-4c3d-9f0b-19077db44543" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.803083] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53b6c4ad-314d-486c-9280-3db96c269d82 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] 
Acquiring lock "2530f124-5c5f-419c-b258-30d0f40e0f89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.803400] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53b6c4ad-314d-486c-9280-3db96c269d82 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "2530f124-5c5f-419c-b258-30d0f40e0f89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.092563] env[67424]: DEBUG oslo_concurrency.lockutils [None req-17e1cefc-c0cc-49e3-996d-018e087b9770 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Acquiring lock "02b8ee24-437c-4da9-877d-cddb3b83c235" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.093216] env[67424]: DEBUG oslo_concurrency.lockutils [None req-17e1cefc-c0cc-49e3-996d-018e087b9770 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "02b8ee24-437c-4da9-877d-cddb3b83c235" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.388439] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.388803] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1332.389535] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.383219] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.386746] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.387816] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.388159] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.388259] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.730205] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4116437c-22d8-4b27-bf9e-5fa0e14d0bf2 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Acquiring lock "f053a34a-f058-4c46-a525-fd01de9f8f57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.730995] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4116437c-22d8-4b27-bf9e-5fa0e14d0bf2 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "f053a34a-f058-4c46-a525-fd01de9f8f57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.387869] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.401618] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.401843] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.402118] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.402300] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1338.403437] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52806647-b44f-4d1d-86b8-994e6ab998e0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.412465] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d1d24f-8729-45ea-af70-30e996e1d98f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.433018] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e3a9ce-0c8f-4d88-9f81-5889d83668d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.443124] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7a087a-fa58-4334-8962-ebb0e1c7af7a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.476926] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181012MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1338.477212] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.477338] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.583227] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.583521] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a7d131b6-3584-48c3-acce-d553c145a837 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.583763] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.583995] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.584240] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.584456] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.584666] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.584874] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.585112] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.585343] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1338.601149] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 784fdc06-dfeb-403b-b0b6-38399a4cf972 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.614092] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance fa2a9d05-598d-41ab-9b74-a3b50d49777d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.630631] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2f42c7da-46a0-4ae2-9ac9-92527183814b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.643779] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.657763] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.669916] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.683755] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.698882] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0894ecdf-ae55-4d68-b7e4-35c3e3eeb789 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.712050] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2530f124-5c5f-419c-b258-30d0f40e0f89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.728717] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 02b8ee24-437c-4da9-877d-cddb3b83c235 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.741914] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f053a34a-f058-4c46-a525-fd01de9f8f57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1338.742208] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1338.742356] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1339.037722] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b872fd7c-ae1c-4278-b642-a2b984da5581 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.046606] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7f3db3-d0df-4211-9574-74aa90fde37c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.079659] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8eb6ae-f2f8-4e91-be0c-68a9ca632f75 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.087571] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcd2af0-0a38-438e-9995-e47643bf4b35 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.101195] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.134047] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1339.151728] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1339.151965] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.675s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.147519] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.387155] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.387336] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1340.387461] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1340.408033] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408212] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408333] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408457] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408582] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408701] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408818] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.408936] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.409065] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.409187] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1340.409307] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1346.053642] env[67424]: WARNING oslo_vmware.rw_handles [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1346.053642] env[67424]: ERROR oslo_vmware.rw_handles [ 1346.054277] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1346.056394] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1346.056700] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 
tempest-ServersAdminTestJSON-1008777526-project-member] Copying Virtual Disk [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/04b347a2-f93b-4361-88dc-77596b769367/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1346.057038] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f98edcc-69ba-4962-8405-6264cb576fe1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.066569] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for the task: (returnval){ [ 1346.066569] env[67424]: value = "task-3200014" [ 1346.066569] env[67424]: _type = "Task" [ 1346.066569] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.074317] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Task: {'id': task-3200014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.577736] env[67424]: DEBUG oslo_vmware.exceptions [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1346.577736] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.578154] env[67424]: ERROR nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.578154] env[67424]: Faults: ['InvalidArgument'] [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Traceback (most recent call last): [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] yield resources [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self.driver.spawn(context, instance, image_meta, [ 1346.578154] env[67424]: ERROR 
nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self._fetch_image_if_missing(context, vi) [ 1346.578154] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] image_cache(vi, tmp_image_ds_loc) [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] vm_util.copy_virtual_disk( [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] session._wait_for_task(vmdk_copy_task) [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return self.wait_for_task(task_ref) [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return evt.wait() [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] result = hub.switch() [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1346.578673] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return self.greenlet.switch() [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self.f(*self.args, **self.kw) [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 
0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] raise exceptions.translate_fault(task_info.error) [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Faults: ['InvalidArgument'] [ 1346.579149] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] [ 1346.579149] env[67424]: INFO nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Terminating instance [ 1346.580440] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.580667] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.581296] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1346.581482] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1346.581705] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d23fa61b-45de-44bc-881f-348529a51b30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.583850] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3313c04-03b0-4942-b11c-aba0211906e7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.590373] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1346.590731] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8784bbe-ebb7-46b7-b1b2-f4cd67c5cc03 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.592620] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.592795] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1346.593738] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c4fed1b-d9fe-4ee1-ab46-971a326507c1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.598097] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for the task: (returnval){ [ 1346.598097] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5242d930-bbd0-bd21-3b43-f317c80c27cd" [ 1346.598097] env[67424]: _type = "Task" [ 1346.598097] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.606155] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5242d930-bbd0-bd21-3b43-f317c80c27cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.657680] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1346.657907] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1346.658099] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Deleting the datastore file [datastore2] 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.658367] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f19105cc-477a-4b7f-858d-46cefee0f533 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.665114] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for the task: (returnval){ [ 1346.665114] env[67424]: value = "task-3200016" [ 1346.665114] env[67424]: _type = "Task" [ 1346.665114] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.672461] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Task: {'id': task-3200016, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.108982] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1347.109293] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Creating directory with path [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1347.109541] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f48709aa-1982-4239-9660-0bb2d2f00ac0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.121739] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Created directory with path [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1347.121936] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Fetch image to [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1347.122177] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1347.122959] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd31d98-bb56-4750-98b0-bb5caa320dcc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.129392] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15085c31-2f0b-4cb9-8176-a8bfdbe21326 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.138494] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467a2517-7182-4345-beab-1d4a901c3714 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.172344] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d1ebeac8-1002-4d18-a423-ba97546c48ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.179070] env[67424]: DEBUG oslo_vmware.api [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Task: {'id': task-3200016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07597} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.180522] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.180751] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1347.180955] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1347.181155] env[67424]: INFO nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Took 0.60 seconds to destroy the instance on the hypervisor. 
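
The wait_for_task / _poll_task entries above show oslo.vmware repeatedly reading a vCenter task object until it reaches a terminal state, logging "progress is N%" while it runs and "completed successfully" with duration_secs when it finishes (or raising the translated fault, as the CopyVirtualDisk_Task failure did). A minimal sketch of that polling pattern, assuming a hypothetical get_task_info(task_ref) helper standing in for the PropertyCollector read the real loop in oslo_vmware/api.py performs:

    import time

    POLL_INTERVAL = 0.5  # seconds; the real interval is configurable

    def wait_for_task(task_ref, get_task_info):
        """Poll a vCenter task until it succeeds or raises its fault.

        get_task_info is a hypothetical stand-in that is assumed to
        return an object with state/progress/error attributes.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state in ('queued', 'running'):
                # matches the "progress is N%" lines emitted by _poll_task
                print("Task: %s progress is %s%%." % (task_ref, info.progress))
            elif info.state == 'success':
                return info  # success entries also report duration_secs
            else:  # 'error'
                # mirrors `raise exceptions.translate_fault(task_info.error)`
                raise RuntimeError(info.error)
            time.sleep(POLL_INTERVAL)
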
[ 1347.182947] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e7663fa1-f26f-4bc8-9543-28deafca2760 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.184864] env[67424]: DEBUG nova.compute.claims [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1347.185047] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.185264] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.207156] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1347.387367] env[67424]: DEBUG oslo_vmware.rw_handles [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1347.449946] env[67424]: DEBUG oslo_vmware.rw_handles [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1347.450209] env[67424]: DEBUG oslo_vmware.rw_handles [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1347.504363] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0a6bfd-428f-4819-9ca2-92cf5166c6a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.511766] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8cd69f-2d56-45f3-adca-a6ab2df3d222 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.542163] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cf60f0-c8ed-427a-99cc-c390d329b21d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.548881] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a44a8a8-51f4-446b-b219-ce6ad8c4f1ee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.561599] env[67424]: DEBUG nova.compute.provider_tree [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.570192] env[67424]: DEBUG nova.scheduler.client.report [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1347.583910] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.399s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.584442] env[67424]: ERROR nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1347.584442] env[67424]: Faults: ['InvalidArgument'] [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Traceback (most recent call last): [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1347.584442] env[67424]: ERROR 
nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self.driver.spawn(context, instance, image_meta, [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self._fetch_image_if_missing(context, vi) [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] image_cache(vi, tmp_image_ds_loc) [ 1347.584442] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] vm_util.copy_virtual_disk( [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] session._wait_for_task(vmdk_copy_task) [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return self.wait_for_task(task_ref) [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return evt.wait() [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] result = hub.switch() [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] return self.greenlet.switch() [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1347.584848] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] self.f(*self.args, **self.kw) [ 1347.585273] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1347.585273] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] raise exceptions.translate_fault(task_info.error) [ 1347.585273] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1347.585273] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Faults: ['InvalidArgument'] [ 1347.585273] env[67424]: ERROR nova.compute.manager [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] [ 1347.585273] env[67424]: DEBUG nova.compute.utils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1347.586480] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Build of instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf was re-scheduled: A specified parameter was not correct: fileType [ 1347.586480] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1347.586841] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1347.587024] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1347.587199] env[67424]: DEBUG nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1347.587362] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1347.900345] env[67424]: DEBUG nova.network.neutron [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.916305] env[67424]: INFO nova.compute.manager [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Took 0.33 seconds to deallocate network for instance. [ 1348.010074] env[67424]: INFO nova.scheduler.client.report [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Deleted allocations for instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf [ 1348.037494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-745e52d2-96ef-4f62-b9e2-81ded4c58759 tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 653.985s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.038938] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 450.118s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.039141] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Acquiring lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.039819] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.040591] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.042352] env[67424]: INFO nova.compute.manager [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Terminating instance [ 1348.044061] env[67424]: DEBUG nova.compute.manager [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1348.044287] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1348.044982] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90cb49e9-5168-49c1-97e9-79236cb62585 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.053007] env[67424]: DEBUG nova.compute.manager [None req-c5991818-0e9b-4218-a1da-29233d59e07c tempest-ServerMetadataTestJSON-543726691 tempest-ServerMetadataTestJSON-543726691-project-member] [instance: 28e7dff8-b59f-4357-a5b7-48e713d59fac] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.058031] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca854ca-b355-4818-9137-7b3ef916ff45 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.088233] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf could not be found. [ 1348.088505] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1348.088660] env[67424]: INFO nova.compute.manager [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Took 0.04 seconds to destroy the instance on the hypervisor. 
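
The inventory payloads reported for provider b21acede-6243-4c82-934a-a3956380220f use the standard placement fields, where schedulable capacity per resource class is (total - reserved) * allocation_ratio. A quick check against the logged values:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: schedulable capacity %.0f' % (rc, capacity))
    # VCPU: schedulable capacity 192        (48 physical vcpus at 4.0x
    #                                        overcommit; the audit above
    #                                        shows 10 of them allocated)
    # MEMORY_MB: schedulable capacity 196078
    # DISK_GB: schedulable capacity 400
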
[ 1348.088904] env[67424]: DEBUG oslo.service.loopingcall [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.089380] env[67424]: DEBUG nova.compute.manager [None req-c5991818-0e9b-4218-a1da-29233d59e07c tempest-ServerMetadataTestJSON-543726691 tempest-ServerMetadataTestJSON-543726691-project-member] [instance: 28e7dff8-b59f-4357-a5b7-48e713d59fac] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.090459] env[67424]: DEBUG nova.compute.manager [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1348.090561] env[67424]: DEBUG nova.network.neutron [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1348.113388] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5991818-0e9b-4218-a1da-29233d59e07c tempest-ServerMetadataTestJSON-543726691 tempest-ServerMetadataTestJSON-543726691-project-member] Lock "28e7dff8-b59f-4357-a5b7-48e713d59fac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.407s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.122569] env[67424]: DEBUG nova.compute.manager [None req-412016ff-7c3d-4ba9-be74-8c0d736ec4b1 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 2ad72ba1-1f79-4a2c-b411-3e0f51ca342b] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.132739] env[67424]: DEBUG nova.network.neutron [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.146754] env[67424]: INFO nova.compute.manager [-] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] Took 0.06 seconds to deallocate network for instance. [ 1348.152248] env[67424]: DEBUG nova.compute.manager [None req-412016ff-7c3d-4ba9-be74-8c0d736ec4b1 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 2ad72ba1-1f79-4a2c-b411-3e0f51ca342b] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.174409] env[67424]: DEBUG oslo_concurrency.lockutils [None req-412016ff-7c3d-4ba9-be74-8c0d736ec4b1 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "2ad72ba1-1f79-4a2c-b411-3e0f51ca342b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 226.184s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.186783] env[67424]: DEBUG nova.compute.manager [None req-0616d565-5ce4-4790-b209-62be7a7a8eac tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] [instance: 654172d5-94b4-427b-930d-7e8d1fa31d36] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.215305] env[67424]: DEBUG nova.compute.manager [None req-0616d565-5ce4-4790-b209-62be7a7a8eac tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] [instance: 654172d5-94b4-427b-930d-7e8d1fa31d36] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.236949] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0616d565-5ce4-4790-b209-62be7a7a8eac tempest-ServersTestMultiNic-1833545920 tempest-ServersTestMultiNic-1833545920-project-member] Lock "654172d5-94b4-427b-930d-7e8d1fa31d36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.125s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.256020] env[67424]: DEBUG nova.compute.manager [None req-3519e644-fb6a-4107-bfcf-8816187984c4 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: b12975c8-0bce-41b6-afae-f78d34a9309f] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.256020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4ee83cb5-5905-41c2-87f4-1bc25012743f tempest-ServersAdminTestJSON-1008777526 tempest-ServersAdminTestJSON-1008777526-project-member] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.216s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.256020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 113.512s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.256491] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1348.256811] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "0f2c77b5-a0ef-4314-8a5b-29f3b0a1dddf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.281236] env[67424]: DEBUG nova.compute.manager [None req-3519e644-fb6a-4107-bfcf-8816187984c4 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: b12975c8-0bce-41b6-afae-f78d34a9309f] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.300030] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3519e644-fb6a-4107-bfcf-8816187984c4 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "b12975c8-0bce-41b6-afae-f78d34a9309f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 208.412s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.309831] env[67424]: DEBUG nova.compute.manager [None req-7f26083d-403e-4ff7-a68d-0486726951e6 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] [instance: 784fdc06-dfeb-403b-b0b6-38399a4cf972] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.333852] env[67424]: DEBUG nova.compute.manager [None req-7f26083d-403e-4ff7-a68d-0486726951e6 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] [instance: 784fdc06-dfeb-403b-b0b6-38399a4cf972] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.356818] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7f26083d-403e-4ff7-a68d-0486726951e6 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Lock "784fdc06-dfeb-403b-b0b6-38399a4cf972" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.528s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.365054] env[67424]: DEBUG nova.compute.manager [None req-128e19a6-605b-4ff1-baf8-62d15cb44586 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] [instance: fa2a9d05-598d-41ab-9b74-a3b50d49777d] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.388165] env[67424]: DEBUG nova.compute.manager [None req-128e19a6-605b-4ff1-baf8-62d15cb44586 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] [instance: fa2a9d05-598d-41ab-9b74-a3b50d49777d] Instance disappeared before build.
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.409792] env[67424]: DEBUG oslo_concurrency.lockutils [None req-128e19a6-605b-4ff1-baf8-62d15cb44586 tempest-ServerShowV247Test-1344574366 tempest-ServerShowV247Test-1344574366-project-member] Lock "fa2a9d05-598d-41ab-9b74-a3b50d49777d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.063s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.419131] env[67424]: DEBUG nova.compute.manager [None req-fbc43216-8dfa-4a4e-9957-4a0af7e52239 tempest-ServersNegativeTestJSON-1738428696 tempest-ServersNegativeTestJSON-1738428696-project-member] [instance: 2f42c7da-46a0-4ae2-9ac9-92527183814b] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.445027] env[67424]: DEBUG nova.compute.manager [None req-fbc43216-8dfa-4a4e-9957-4a0af7e52239 tempest-ServersNegativeTestJSON-1738428696 tempest-ServersNegativeTestJSON-1738428696-project-member] [instance: 2f42c7da-46a0-4ae2-9ac9-92527183814b] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1348.466013] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fbc43216-8dfa-4a4e-9957-4a0af7e52239 tempest-ServersNegativeTestJSON-1738428696 tempest-ServersNegativeTestJSON-1738428696-project-member] Lock "2f42c7da-46a0-4ae2-9ac9-92527183814b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.588s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.474184] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1348.520990] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.521268] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.522685] env[67424]: INFO nova.compute.claims [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1348.748903] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e6a0ee-a6ff-402d-b66f-07dad3a364bd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.756176] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a5a9b2-13c5-440a-9263-83c1b61e3c30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.785045] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cf8717-461f-4934-af01-d2c9331a95e4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.791462] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ddbdcd-9c72-43d4-9658-87104b073f77 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.803790] env[67424]: DEBUG nova.compute.provider_tree [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.812284] env[67424]: DEBUG nova.scheduler.client.report [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1348.825451] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.825893] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1348.857024] env[67424]: DEBUG nova.compute.utils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1348.857962] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1348.858327] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1348.866539] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1348.928742] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1348.934156] env[67424]: DEBUG nova.policy [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ac8098a2a904b4292a23bc38e8be219', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc66a2bf57d34e309f0f21a60c224076', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1348.954625] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=<?>,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:15:51Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1348.954876] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1348.955045] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1348.955236] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1348.955386] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1348.955534] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1348.955738] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922
tempest-ServersTestJSON-1776744922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1348.955899] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1348.956084] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1348.956252] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1348.956549] env[67424]: DEBUG nova.virt.hardware [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1348.957405] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad3cf0d-9f5f-4d0a-bc75-af734b282d1e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.965429] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcfdc1d-9d7d-42f5-9a00-6638565830aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.666078] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Successfully created port: 11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1350.282620] env[67424]: DEBUG nova.compute.manager [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Received event network-vif-plugged-11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1350.283043] env[67424]: DEBUG oslo_concurrency.lockutils [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] Acquiring lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.283610] env[67424]: DEBUG oslo_concurrency.lockutils [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s
{{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.283954] env[67424]: DEBUG oslo_concurrency.lockutils [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.285023] env[67424]: DEBUG nova.compute.manager [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] No waiting events found dispatching network-vif-plugged-11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1350.285023] env[67424]: WARNING nova.compute.manager [req-06926305-a022-4f3e-985e-df7b53ffe9f2 req-d3d36138-f1e5-4068-9f79-20ab1d3bfc86 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Received unexpected event network-vif-plugged-11e28bce-24b9-4cea-b2a8-8a20ec1d771a for instance with vm_state building and task_state spawning. [ 1350.356882] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Successfully updated port: 11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1350.366195] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.366195] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.366347] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1350.405483] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance cache missing network info.
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1350.659037] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Updating instance_info_cache with network_info: [{"id": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "address": "fa:16:3e:0e:18:28", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11e28bce-24", "ovs_interfaceid": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.673689] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.674016] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance network_info: |[{"id": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "address": "fa:16:3e:0e:18:28", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11e28bce-24", "ovs_interfaceid": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1350.674427] 
env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:18:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11e28bce-24b9-4cea-b2a8-8a20ec1d771a', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.682363] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating folder: Project (dc66a2bf57d34e309f0f21a60c224076). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1350.682901] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d6e8e0-584f-4a79-8ab2-dd81947d29cc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.694846] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created folder: Project (dc66a2bf57d34e309f0f21a60c224076) in parent group-v639843. [ 1350.695046] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating folder: Instances. Parent ref: group-v639918. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1350.695280] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4715e238-3ff2-4a65-a64e-3a0ffca19487 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.704976] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created folder: Instances in parent group-v639918. [ 1350.705237] env[67424]: DEBUG oslo.service.loopingcall [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.705431] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1350.705665] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b35a18d7-2679-4f42-a828-9898e02b46c3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.726050] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.726050] env[67424]: value = "task-3200019" [ 1350.726050] env[67424]: _type = "Task" [ 1350.726050] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.732983] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200019, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.235535] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200019, 'name': CreateVM_Task, 'duration_secs': 0.26976} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.235994] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1351.236785] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.237132] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.239328] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1351.239328] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55099945-a8e9-4d83-ac3a-10df0f232edb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.242400] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 1351.242400] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52f8fb93-07c5-d594-9752-cb175a24de1a" [ 1351.242400] env[67424]: _type = "Task" [ 1351.242400] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.250031] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52f8fb93-07c5-d594-9752-cb175a24de1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.752447] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.752811] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1351.752906] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.308658] env[67424]: DEBUG nova.compute.manager [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Received event network-changed-11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1352.308893] env[67424]: DEBUG nova.compute.manager [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Refreshing instance network info cache due to event network-changed-11e28bce-24b9-4cea-b2a8-8a20ec1d771a. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1352.309151] env[67424]: DEBUG oslo_concurrency.lockutils [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] Acquiring lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.309315] env[67424]: DEBUG oslo_concurrency.lockutils [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] Acquired lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.309481] env[67424]: DEBUG nova.network.neutron [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Refreshing network info cache for port 11e28bce-24b9-4cea-b2a8-8a20ec1d771a {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1352.603624] env[67424]: DEBUG nova.network.neutron [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Updated VIF entry in instance network info cache for port 11e28bce-24b9-4cea-b2a8-8a20ec1d771a. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1352.604095] env[67424]: DEBUG nova.network.neutron [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Updating instance_info_cache with network_info: [{"id": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "address": "fa:16:3e:0e:18:28", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11e28bce-24", "ovs_interfaceid": "11e28bce-24b9-4cea-b2a8-8a20ec1d771a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.613965] env[67424]: DEBUG oslo_concurrency.lockutils [req-6addb732-29f8-4071-b1bf-794eca597d49 req-85495a71-0f4e-445e-bd55-893a2cfb40f3 service nova] Releasing lock "refresh_cache-31acf58b-8133-48e3-b942-2aa49a9cea6b" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.018907] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "afd0f239-0752-4e2e-a232-9f22722753f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.019200] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.388074] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.388074] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1392.388948] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.383015] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.387692] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.069254] env[67424]: WARNING oslo_vmware.rw_handles [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1396.069254] env[67424]: ERROR oslo_vmware.rw_handles [ 1396.069777] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1396.071870] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1396.072136] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 
tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Copying Virtual Disk [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/248ec742-c766-4d9a-80c8-225142fc2d43/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1396.072447] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72dacb8b-6ae6-4f59-8ef0-10a266c20629 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.079924] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for the task: (returnval){ [ 1396.079924] env[67424]: value = "task-3200020" [ 1396.079924] env[67424]: _type = "Task" [ 1396.079924] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.087511] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Task: {'id': task-3200020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.388252] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.590770] env[67424]: DEBUG oslo_vmware.exceptions [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1396.591105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.591669] env[67424]: ERROR nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1396.591669] env[67424]: Faults: ['InvalidArgument'] [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] Traceback (most recent call last): [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] yield resources [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self.driver.spawn(context, instance, image_meta, [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self._fetch_image_if_missing(context, vi) [ 1396.591669] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] image_cache(vi, tmp_image_ds_loc) [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] vm_util.copy_virtual_disk( [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] session._wait_for_task(vmdk_copy_task) [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return self.wait_for_task(task_ref) [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return evt.wait() [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] result = hub.switch() [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1396.591971] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return self.greenlet.switch() [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self.f(*self.args, **self.kw) [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] raise exceptions.translate_fault(task_info.error) [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] Faults: ['InvalidArgument'] [ 1396.592240] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] [ 1396.592240] env[67424]: INFO nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Terminating instance [ 1396.594140] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.594140] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1396.594140] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bec7c390-7e6d-4a85-8cb5-cddbde261983 
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.596279] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1396.596470] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1396.597204] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c669ab52-553a-4d72-89fa-722fbc959ac7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.604145] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1396.604598] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03d0ee03-9c00-49f9-93e0-6b7d35834c07 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.608409] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1396.608582] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1396.609311] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43317eae-33f9-4bcc-82fe-207c82a69a25 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.613890] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for the task: (returnval){ [ 1396.613890] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b0e504-41c3-22f8-4f72-1ae0f3ea8add" [ 1396.613890] env[67424]: _type = "Task" [ 1396.613890] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.684602] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1396.684868] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1396.685066] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Deleting the datastore file [datastore2] a7d131b6-3584-48c3-acce-d553c145a837 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1396.685343] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80eefad6-ef87-4d7e-bfde-969410d73763 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.691714] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for the task: (returnval){ [ 1396.691714] env[67424]: value = "task-3200022" [ 1396.691714] env[67424]: _type = "Task" [ 1396.691714] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.699344] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Task: {'id': task-3200022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.124195] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1397.124507] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Creating directory with path [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.124706] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2393bed8-7243-4130-8cbe-55f47162d973 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.135492] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Created directory with path [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.135676] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Fetch image to [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1397.135846] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1397.136594] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a01c4d7-9373-4755-a531-0a72d6da3f32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.142929] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815f0cea-19f9-4d46-9d54-c2d512dff3b4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.151937] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db33741a-7e93-4783-aa07-08c6d0f757a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.183016] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982bfa7d-7a80-4378-a57c-5ec31c6add4c {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.188422] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-729e27f8-2308-415d-a1d9-2d2f6c34b37c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.199373] env[67424]: DEBUG oslo_vmware.api [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Task: {'id': task-3200022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075599} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.199593] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1397.199772] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1397.199942] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1397.200137] env[67424]: INFO nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Took 0.60 seconds to destroy the instance on the hypervisor. 
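
The DeleteDatastoreFile_Task above is simply polled until it reaches a terminal state (progress is 0%, then "completed successfully" with duration_secs 0.075599), and the traceback earlier shows the same machinery raising exceptions.translate_fault when a task ends in error instead. A minimal sketch of that poll-until-terminal loop, with invented names (get_task_info, poll_interval) rather than oslo.vmware's actual internals:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info is a stand-in for fetching the vSphere TaskInfo;
        # the real code reads it through the PropertyCollector.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # Mirrors _poll_task above: a faulted task surfaces as an
                # exception (the VimFaultException seen in this log).
                raise RuntimeError(info['error'])
            time.sleep(poll_interval)  # 'queued'/'running': poll again
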
[ 1397.202370] env[67424]: DEBUG nova.compute.claims [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1397.202486] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.202701] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.206765] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1397.264887] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1397.280771] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1397.281058] env[67424]: DEBUG nova.compute.provider_tree [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1397.292964] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1397.311283] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1397.350081] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1397.407287] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.411736] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1397.411912] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1397.557252] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f0ab26-9174-45a4-9c0f-ef99cdb3038b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.564995] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3bb410-e7b7-4539-a0ef-f79dd83f35cd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.594929] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54c3085-266f-42ba-8d1d-461fa6da4351 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.601485] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57be7f8-ac3e-4db1-b1ee-628ed636085c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.613896] env[67424]: DEBUG nova.compute.provider_tree [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.622883] env[67424]: DEBUG nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1397.638694] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.436s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.639300] env[67424]: ERROR nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1397.639300] env[67424]: Faults: ['InvalidArgument'] [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] Traceback (most recent call last): [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self.driver.spawn(context, instance, image_meta, [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self._fetch_image_if_missing(context, vi) [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] image_cache(vi, tmp_image_ds_loc) [ 1397.639300] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] vm_util.copy_virtual_disk( [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] session._wait_for_task(vmdk_copy_task) [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return self.wait_for_task(task_ref) [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return evt.wait() [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] result = hub.switch() [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] return self.greenlet.switch() [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1397.639670] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] self.f(*self.args, **self.kw) [ 1397.639979] env[67424]: ERROR nova.compute.manager [instance: 
a7d131b6-3584-48c3-acce-d553c145a837] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1397.639979] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] raise exceptions.translate_fault(task_info.error) [ 1397.639979] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1397.639979] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] Faults: ['InvalidArgument'] [ 1397.639979] env[67424]: ERROR nova.compute.manager [instance: a7d131b6-3584-48c3-acce-d553c145a837] [ 1397.640122] env[67424]: DEBUG nova.compute.utils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1397.641417] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Build of instance a7d131b6-3584-48c3-acce-d553c145a837 was re-scheduled: A specified parameter was not correct: fileType [ 1397.641417] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1397.641767] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1397.641936] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1397.642126] env[67424]: DEBUG nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1397.642284] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1398.142568] env[67424]: DEBUG nova.network.neutron [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.155956] env[67424]: INFO nova.compute.manager [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Took 0.51 seconds to deallocate network for instance. [ 1398.259199] env[67424]: INFO nova.scheduler.client.report [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Deleted allocations for instance a7d131b6-3584-48c3-acce-d553c145a837 [ 1398.278122] env[67424]: DEBUG oslo_concurrency.lockutils [None req-25c54c8a-6e17-4c47-9d96-898bf47fe157 tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 687.669s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.279287] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 489.140s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.279519] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Acquiring lock "a7d131b6-3584-48c3-acce-d553c145a837-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.279728] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.279894] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.281880] env[67424]: INFO nova.compute.manager [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Terminating instance [ 1398.283565] env[67424]: DEBUG nova.compute.manager [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1398.283755] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1398.284252] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-983e55be-5bd3-444d-afbc-1f5073ab75dd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.293243] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dcd5b2-c669-49c4-ab5f-1389f9008af7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.304116] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1398.324480] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7d131b6-3584-48c3-acce-d553c145a837 could not be found. 
[ 1398.324707] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1398.324891] env[67424]: INFO nova.compute.manager [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1398.325167] env[67424]: DEBUG oslo.service.loopingcall [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.325502] env[67424]: DEBUG nova.compute.manager [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1398.325616] env[67424]: DEBUG nova.network.neutron [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1398.353937] env[67424]: DEBUG nova.network.neutron [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.361870] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.361870] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.363247] env[67424]: INFO nova.compute.claims [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1398.366498] env[67424]: INFO nova.compute.manager [-] [instance: a7d131b6-3584-48c3-acce-d553c145a837] Took 0.04 seconds to deallocate network for instance. 
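
The "Waiting for function ..._deallocate_network_with_retries to return" line above comes from oslo.service's looping-call helper, which re-invokes a function until it signals completion. A hedged sketch of that pattern (the retry function and interval are invented, and Nova's real retry loop differs in detail):

    from oslo_service import loopingcall

    attempts = []

    def _deallocate_with_retries():
        attempts.append(1)
        if len(attempts) == 3:  # pretend the third attempt succeeds
            raise loopingcall.LoopingCallDone(retvalue=True)
        # returning normally means "not done yet"; the loop calls us again

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=2).wait()  # blocks until LoopingCallDone
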
[ 1398.387905] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.388837] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.405221] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.466296] env[67424]: DEBUG oslo_concurrency.lockutils [None req-ef3f2549-3534-4e5c-835b-be7c2df6fedb tempest-InstanceActionsV221TestJSON-1152893157 tempest-InstanceActionsV221TestJSON-1152893157-project-member] Lock "a7d131b6-3584-48c3-acce-d553c145a837" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.467268] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "a7d131b6-3584-48c3-acce-d553c145a837" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 163.723s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.467470] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a7d131b6-3584-48c3-acce-d553c145a837] During sync_power_state the instance has a pending task (deleting). Skip. 
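
The "Running periodic task ComputeManager..." lines are produced by oslo.service's periodic-task runner; handlers are registered declaratively on the manager class. A minimal declaration sketch (the 60-second spacing is an assumption, not this deployment's configured value):

    from oslo_service import periodic_task

    class ComputeManagerSketch(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def update_available_resource(self, context):
            # The runner calls this roughly every `spacing` seconds and
            # logs the "Running periodic task ..." line before each call.
            pass
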
[ 1398.467647] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "a7d131b6-3584-48c3-acce-d553c145a837" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.639070] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1323da0-bd26-4cf6-9f3e-f3807ea9b26f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.647419] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b196931e-c9e9-4667-a116-67afb2f40e6c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.679434] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14abe0a-c0c0-4c52-8ed6-d808be85af63 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.687946] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccf2689-e96c-422c-9039-6d45b82f6a79 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.702147] env[67424]: DEBUG nova.compute.provider_tree [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.714618] env[67424]: DEBUG nova.scheduler.client.report [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1398.729162] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.729788] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1398.733223] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.328s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.733545] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.733726] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1398.734871] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c02d76-4c69-4449-9642-da912248912f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.743949] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0999494f-318f-4e2d-ac6f-ee1772942794 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.760626] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aac6087-1410-4b46-9848-f75cb80cd779 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.769945] env[67424]: DEBUG nova.compute.utils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1398.771262] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d03a8ab-e523-4a31-a448-323966c369dd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.775611] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Allocating IP information in the background. 
{{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1398.775857] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1398.780521] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1398.809408] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181004MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1398.809589] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.809767] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.855639] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1398.870684] env[67424]: DEBUG nova.policy [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b65c4e3396554092b2d2227443e1566e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1175d0fb2c454022bcc36081c9df063d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1398.888324] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1398.888604] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1398.888765] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.888946] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1398.889108] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.889262] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1398.889471] 
env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1398.889630] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1398.889802] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1398.890030] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1398.890222] env[67424]: DEBUG nova.virt.hardware [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1398.891396] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dac0b6-b7c2-4d8e-87e4-ba7370a46944 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.894950] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 850df4c3-3a92-47d3-973d-62f41d813f6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895115] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895270] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895385] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895528] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895681] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895806] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.895933] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.896071] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.896191] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1398.902615] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fc8a2e-51c6-4935-b034-e3d537892849 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.917845] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.929559] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.942731] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0894ecdf-ae55-4d68-b7e4-35c3e3eeb789 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.954268] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2530f124-5c5f-419c-b258-30d0f40e0f89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.966832] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 02b8ee24-437c-4da9-877d-cddb3b83c235 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.978144] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f053a34a-f058-4c46-a525-fd01de9f8f57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.991760] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1398.991760] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1398.991760] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1399.243022] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc427076-fd8a-4b73-b21f-9bce767d08b5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.250961] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869ec512-65a1-43ac-b909-9dfaf36d126c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.283710] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f677fd-5b61-4a8f-bc9e-9abfebcf1207 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.291688] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df29b25-3684-40f8-870b-6b0caea6ccf4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.306152] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.316806] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1399.336117] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1399.336315] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.527s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.588372] env[67424]: DEBUG nova.network.neutron [None 
req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Successfully created port: 9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.321867] env[67424]: DEBUG nova.compute.manager [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Received event network-vif-plugged-9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1400.322143] env[67424]: DEBUG oslo_concurrency.lockutils [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] Acquiring lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.322391] env[67424]: DEBUG oslo_concurrency.lockutils [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.322570] env[67424]: DEBUG oslo_concurrency.lockutils [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.322745] env[67424]: DEBUG nova.compute.manager [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] No waiting events found dispatching network-vif-plugged-9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1400.322907] env[67424]: WARNING nova.compute.manager [req-e90ac964-e761-4da5-99e0-71d80306c055 req-0f9acb69-3533-4a19-8e74-5e566698a47a service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Received unexpected event network-vif-plugged-9ecdd1bc-5280-46ca-88c2-b59f83285f14 for instance with vm_state building and task_state spawning. 
[ 1400.602961] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Successfully updated port: 9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1400.621254] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.621422] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.621560] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1400.710106] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1400.909582] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Updating instance_info_cache with network_info: [{"id": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "address": "fa:16:3e:31:1b:be", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecdd1bc-52", "ovs_interfaceid": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.923685] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.923984] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance network_info: |[{"id": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "address": "fa:16:3e:31:1b:be", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecdd1bc-52", "ovs_interfaceid": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1400.924411] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:1b:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ecdd1bc-5280-46ca-88c2-b59f83285f14', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1400.932444] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating folder: Project (1175d0fb2c454022bcc36081c9df063d). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1400.933072] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a799ebe-2f84-4d4e-a66d-ca46f682f938 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.945450] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created folder: Project (1175d0fb2c454022bcc36081c9df063d) in parent group-v639843. [ 1400.945647] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating folder: Instances. Parent ref: group-v639921. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1400.945877] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04b5900e-ff61-43be-8e36-a71789a8fe42 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.953464] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created folder: Instances in parent group-v639921. [ 1400.953689] env[67424]: DEBUG oslo.service.loopingcall [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.953865] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1400.954067] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90b43be9-9ef6-405f-8865-f70d5d6b604a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.972150] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1400.972150] env[67424]: value = "task-3200025" [ 1400.972150] env[67424]: _type = "Task" [ 1400.972150] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.979158] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200025, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.481740] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200025, 'name': CreateVM_Task, 'duration_secs': 0.274466} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.482250] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1401.482625] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.482800] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.483135] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1401.483394] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-402b6f71-45c9-4ed6-b5ba-e43c5d0f0a55 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.487404] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 1401.487404] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]524b7449-fca9-c054-45bf-e2b91a2a785d" [ 1401.487404] env[67424]: _type = "Task" [ 1401.487404] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.494516] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]524b7449-fca9-c054-45bf-e2b91a2a785d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.997364] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.997624] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1401.997839] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.336591] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.336779] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1402.336903] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1402.353043] env[67424]: DEBUG nova.compute.manager [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Received event network-changed-9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1402.353043] env[67424]: DEBUG nova.compute.manager [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Refreshing instance network info cache due to event network-changed-9ecdd1bc-5280-46ca-88c2-b59f83285f14. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1402.353266] env[67424]: DEBUG oslo_concurrency.lockutils [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] Acquiring lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.353483] env[67424]: DEBUG oslo_concurrency.lockutils [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] Acquired lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.353760] env[67424]: DEBUG nova.network.neutron [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Refreshing network info cache for port 9ecdd1bc-5280-46ca-88c2-b59f83285f14 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1402.360736] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.360884] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361030] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361230] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361363] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361483] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361599] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361714] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361828] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.361941] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1402.362084] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1402.668267] env[67424]: DEBUG nova.network.neutron [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Updated VIF entry in instance network info cache for port 9ecdd1bc-5280-46ca-88c2-b59f83285f14. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1402.668673] env[67424]: DEBUG nova.network.neutron [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Updating instance_info_cache with network_info: [{"id": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "address": "fa:16:3e:31:1b:be", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecdd1bc-52", "ovs_interfaceid": "9ecdd1bc-5280-46ca-88c2-b59f83285f14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.678291] env[67424]: DEBUG oslo_concurrency.lockutils [req-5ebeb02a-a01f-4513-8f22-bf5a22615801 req-1a4318c2-9178-438c-9e0d-aae9278a9ac3 service nova] Releasing lock "refresh_cache-77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.598103] env[67424]: WARNING oslo_vmware.rw_handles [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 
1442.598103] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1442.598103] env[67424]: ERROR oslo_vmware.rw_handles [ 1442.598790] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1442.600893] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1442.601246] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Copying Virtual Disk [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/18373236-891d-4c80-a744-c4681ecc33ae/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1442.601612] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab70a537-5833-42ae-a25d-75b48416bd70 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.608935] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for the task: (returnval){ [ 1442.608935] env[67424]: value = "task-3200026" [ 1442.608935] env[67424]: _type = "Task" [ 1442.608935] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.617080] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Task: {'id': task-3200026, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.119438] env[67424]: DEBUG oslo_vmware.exceptions [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1443.119755] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.120314] env[67424]: ERROR nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1443.120314] env[67424]: Faults: ['InvalidArgument'] [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Traceback (most recent call last): [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] yield resources [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] self.driver.spawn(context, instance, image_meta, [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] self._fetch_image_if_missing(context, vi) [ 1443.120314] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] image_cache(vi, tmp_image_ds_loc) [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] vm_util.copy_virtual_disk( [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] session._wait_for_task(vmdk_copy_task) [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] return self.wait_for_task(task_ref) [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] return evt.wait() [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] result = hub.switch() [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1443.120748] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] return self.greenlet.switch() [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] self.f(*self.args, **self.kw) [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] raise exceptions.translate_fault(task_info.error) [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Faults: ['InvalidArgument'] [ 1443.121069] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] [ 1443.121069] env[67424]: INFO nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Terminating instance [ 1443.122219] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.122426] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1443.122672] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1f9f85c-d4c4-498b-a7bc-f69e3570242a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.124865] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1443.125070] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1443.125787] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc43f1e6-2477-4dbe-9a07-ec49152438da {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.133663] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1443.133881] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77eea5a1-89cd-46eb-b3d4-75c1c6033413 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.135930] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1443.136115] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1443.137051] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b393820-757c-4d4c-a400-f405401e41a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.142240] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1443.142240] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]525aa0b2-640c-5d6b-9894-e9900e2452b3" [ 1443.142240] env[67424]: _type = "Task" [ 1443.142240] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.149307] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]525aa0b2-640c-5d6b-9894-e9900e2452b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.202628] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1443.202853] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1443.203053] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Deleting the datastore file [datastore2] 850df4c3-3a92-47d3-973d-62f41d813f6c {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1443.203324] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17fa0672-fc9b-46fa-b923-fc0fbaed6523 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.209825] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for the task: (returnval){ [ 1443.209825] env[67424]: value = "task-3200028" [ 1443.209825] env[67424]: _type = "Task" [ 1443.209825] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.217959] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Task: {'id': task-3200028, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.653014] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1443.653397] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating directory with path [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1443.653479] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95086161-ec6f-4826-9ad1-b0f8abd0426a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.664770] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created directory with path [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1443.664948] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Fetch image to [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1443.665140] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1443.665841] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78037b8e-5ab0-4cdc-8b38-8c3af01e6e68 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.672071] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510924b9-02c3-4587-b82a-996a7907f862 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.680875] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fb6fbf-4d9d-47a3-8c34-3dba2012ef3f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.710330] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a854284-0c16-4374-a19c-93af74d4520d {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.720535] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dd3f59ee-8b74-41b1-9ad7-5513e7836a9a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.722166] env[67424]: DEBUG oslo_vmware.api [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Task: {'id': task-3200028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07974} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.722395] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.722573] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1443.722744] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1443.722932] env[67424]: INFO nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1443.724985] env[67424]: DEBUG nova.compute.claims [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1443.725173] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.725388] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.743200] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1443.796141] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1443.859282] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1443.859501] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
[ 1444.016430] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd714f54-adc9-4f6c-902b-091f19744d81 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.025610] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdf4586-9b12-4076-9d88-b9045c0d7ad2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.054849] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f50480-46d7-4849-9f26-d789088b2d32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.061840] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e66e4f1-5ba2-4635-bff6-635b40ecd66a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.074614] env[67424]: DEBUG nova.compute.provider_tree [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1444.083975] env[67424]: DEBUG nova.scheduler.client.report [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1444.097554] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.372s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1444.098085] env[67424]: ERROR nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1444.098085] env[67424]: Faults: ['InvalidArgument']
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Traceback (most recent call last):
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     self.driver.spawn(context, instance, image_meta,
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     self._fetch_image_if_missing(context, vi)
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     image_cache(vi, tmp_image_ds_loc)
[ 1444.098085] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     vm_util.copy_virtual_disk(
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     session._wait_for_task(vmdk_copy_task)
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     return self.wait_for_task(task_ref)
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     return evt.wait()
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     result = hub.switch()
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     return self.greenlet.switch()
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1444.098396] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     self.f(*self.args, **self.kw)
[ 1444.098667] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1444.098667] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]     raise exceptions.translate_fault(task_info.error)
[ 1444.098667] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1444.098667] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Faults: ['InvalidArgument']
[ 1444.098667] env[67424]: ERROR nova.compute.manager [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c]
[ 1444.098773] env[67424]: DEBUG nova.compute.utils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1444.100223] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Build of instance 850df4c3-3a92-47d3-973d-62f41d813f6c was re-scheduled: A specified parameter was not correct: fileType
[ 1444.100223] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1444.100598] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1444.100768] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1444.100940] env[67424]: DEBUG nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1444.101148] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1444.563798] env[67424]: DEBUG nova.network.neutron [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1444.580853] env[67424]: INFO nova.compute.manager [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Took 0.48 seconds to deallocate network for instance.
[ 1444.684020] env[67424]: INFO nova.scheduler.client.report [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Deleted allocations for instance 850df4c3-3a92-47d3-973d-62f41d813f6c
[ 1444.708334] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa63cfc2-15c2-4de9-80ab-91b47188359a tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 671.334s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1444.708334] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 474.593s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1444.708334] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Acquiring lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1444.708571] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1444.708571] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1444.711321] env[67424]: INFO nova.compute.manager [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Terminating instance
[ 1444.713637] env[67424]: DEBUG nova.compute.manager [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1444.713979] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1444.714446] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6da39a1-7f91-408f-b59e-39046084c5c3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.719730] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1444.727137] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0424c996-f2a1-4b9f-be37-04a30fa01cd1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1444.760978] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 850df4c3-3a92-47d3-973d-62f41d813f6c could not be found.
[ 1444.761180] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1444.761368] env[67424]: INFO nova.compute.manager [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Took 0.05 seconds to destroy the instance on the hypervisor.
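[editor's note] The terminate sequence above is bracketed by the lockutils pattern that recurs all through this log: acquire a named lock, record how long the caller waited, run the critical section, record how long the lock was held. A minimal sketch of that idea with a plain `threading.Lock` registry; it is not oslo.concurrency's implementation, which adds fair locks, external file locks and semaphore cleanup:

```python
# Sketch of the named-lock wait/held accounting the lockutils entries
# show. One process-wide registry of locks keyed by name.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)

@contextmanager
def named_lock(name: str, owner: str):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    t_acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t_acquired - t0:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t_acquired
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage:
#   with named_lock("compute_resources", "ResourceTracker.instance_claim"):
#       ...claim resources...
```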
[ 1444.761615] env[67424]: DEBUG oslo.service.loopingcall [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1444.764199] env[67424]: DEBUG nova.compute.manager [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1444.764303] env[67424]: DEBUG nova.network.neutron [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1444.777907] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1444.778411] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1444.779639] env[67424]: INFO nova.compute.claims [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1444.791022] env[67424]: DEBUG nova.network.neutron [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1444.800153] env[67424]: INFO nova.compute.manager [-] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] Took 0.04 seconds to deallocate network for instance.
[ 1444.895236] env[67424]: DEBUG oslo_concurrency.lockutils [None req-dd6c5f70-e154-4de3-8544-90c4e1a53ca0 tempest-ServerActionsTestJSON-850006308 tempest-ServerActionsTestJSON-850006308-project-member] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.188s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1444.896105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 210.152s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1444.896256] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 850df4c3-3a92-47d3-973d-62f41d813f6c] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1444.896431] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "850df4c3-3a92-47d3-973d-62f41d813f6c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1445.024657] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f78faf-cafa-4dec-85ea-3cc35af67749 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.032154] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5615b36-fe95-4196-813e-843fcb422a76 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.063204] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd834ec-3fd4-4a13-adae-fde4f7d13ac2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.070211] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50baf3c7-5b6a-4a49-98e7-0c699cf06c46 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.083094] env[67424]: DEBUG nova.compute.provider_tree [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1445.092075] env[67424]: DEBUG nova.scheduler.client.report [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
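[editor's note] The inventory record above is enough to reproduce the capacity placement schedules against: for each resource class, up to total × allocation_ratio − reserved can be allocated. A quick check against the numbers in the log (my arithmetic, not output from this system):

```python
# Reproduce placement capacity from the inventory record in the log:
# capacity = total * allocation_ratio - reserved, per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = inv['total'] * inv['allocation_ratio'] - inv['reserved']
    print(rc, int(capacity))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400
```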
[ 1445.108019] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1445.108594] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1445.143197] env[67424]: DEBUG nova.compute.utils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1445.145239] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1445.145412] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1445.153651] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1445.204375] env[67424]: DEBUG nova.policy [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0a96f16a7bc4bed8c40fa13d189f151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05b9e6f225164ae2b9f0791afc2dc646', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1445.217873] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1445.243791] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=<?>,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:15:51Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1445.244044] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1445.244235] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1445.244432] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1445.244582] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1445.244730] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1445.244940] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1445.245121] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1445.245290] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1445.245455] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1445.245628] env[67424]: DEBUG nova.virt.hardware [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
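[editor's note] The nova.virt.hardware entries above enumerate candidate (sockets, cores, threads) splits for 1 vCPU under 65536/65536/65536 limits and land on the single topology 1:1:1. A sketch of that enumeration under the simple "all factorizations within the limits" rule the log suggests; the real hardware.py adds preference sorting and flavor/image constraints on top:

```python
# Enumerate possible (sockets, cores, threads) topologies for a vCPU
# count under per-dimension limits, as the hardware.py entries trace.
def possible_topologies(vcpus: int, max_sockets: int, max_cores: int,
                        max_threads: int):
    """Yield every (sockets, cores, threads) split whose product is vcpus."""
    # No factor can exceed vcpus, so cap the search space there; this
    # keeps the loop cheap even with 65536-sized limits.
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1, 65536, 65536, 65536)))
# [(1, 1, 1)] -- matches "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
```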
[ 1445.246513] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca6dfbe-a5b8-4ade-952f-15792530749f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.254716] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7464d7-ef32-40af-9e29-e0ce019fae8e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1445.649520] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Successfully created port: 0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1446.460154] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Successfully updated port: 0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1446.470828] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1446.470984] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquired lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1446.471153] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1446.529932] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1447.030928] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Updating instance_info_cache with network_info: [{"id": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "address": "fa:16:3e:3a:b4:fd", "network": {"id": "f9f14214-768e-4f1a-ad82-c88dcb337455", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-89423733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05b9e6f225164ae2b9f0791afc2dc646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0141487b-d3", "ovs_interfaceid": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1447.045817] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Releasing lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1447.046152] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance network_info: |[{"id": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "address": "fa:16:3e:3a:b4:fd", "network": {"id": "f9f14214-768e-4f1a-ad82-c88dcb337455", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-89423733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05b9e6f225164ae2b9f0791afc2dc646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0141487b-d3", "ovs_interfaceid": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1447.046576] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:b4:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0141487b-d372-4eab-91aa-cde1b3cc40d6', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1447.054212] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Creating folder: Project (05b9e6f225164ae2b9f0791afc2dc646). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1447.054815] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aadb4bf5-a602-4164-b1ba-dfb73ebd7fbf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1447.065187] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Created folder: Project (05b9e6f225164ae2b9f0791afc2dc646) in parent group-v639843.
[ 1447.065372] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Creating folder: Instances. Parent ref: group-v639924. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1447.066055] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fd21ab0-c6a8-4709-9ff0-4b24b7f80e57 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1447.070931] env[67424]: DEBUG nova.compute.manager [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Received event network-vif-plugged-0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1447.071151] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Acquiring lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1447.071355] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1447.071524] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1447.071691] env[67424]: DEBUG nova.compute.manager [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] No waiting events found dispatching network-vif-plugged-0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1447.071895] env[67424]: WARNING nova.compute.manager [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Received unexpected event network-vif-plugged-0141487b-d372-4eab-91aa-cde1b3cc40d6 for instance with vm_state building and task_state spawning.
[ 1447.072237] env[67424]: DEBUG nova.compute.manager [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Received event network-changed-0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1447.072411] env[67424]: DEBUG nova.compute.manager [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Refreshing instance network info cache due to event network-changed-0141487b-d372-4eab-91aa-cde1b3cc40d6. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1447.072604] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Acquiring lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1447.072730] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Acquired lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1447.072888] env[67424]: DEBUG nova.network.neutron [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Refreshing network info cache for port 0141487b-d372-4eab-91aa-cde1b3cc40d6 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1447.075825] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Created folder: Instances in parent group-v639924.
[ 1447.076064] env[67424]: DEBUG oslo.service.loopingcall [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1447.076252] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1447.076687] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd7f6d9e-7ba1-474d-afca-b4fd2ebda903 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1447.097909] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1447.097909] env[67424]:   value = "task-3200031"
[ 1447.097909] env[67424]:   _type = "Task"
[ 1447.097909] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1447.107540] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200031, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1447.530802] env[67424]: DEBUG nova.network.neutron [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Updated VIF entry in instance network info cache for port 0141487b-d372-4eab-91aa-cde1b3cc40d6. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1447.531179] env[67424]: DEBUG nova.network.neutron [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Updating instance_info_cache with network_info: [{"id": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "address": "fa:16:3e:3a:b4:fd", "network": {"id": "f9f14214-768e-4f1a-ad82-c88dcb337455", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-89423733-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05b9e6f225164ae2b9f0791afc2dc646", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0141487b-d3", "ovs_interfaceid": "0141487b-d372-4eab-91aa-cde1b3cc40d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1447.540850] env[67424]: DEBUG oslo_concurrency.lockutils [req-9bbe291d-e4e3-4c47-a043-51ec351425c3 req-7de7c67c-6b0c-4b84-acfd-a4cb9044cb26 service nova] Releasing lock "refresh_cache-ea1ca448-0e06-4548-80cd-9107b43eefe4" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1447.608512] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200031, 'name': CreateVM_Task, 'duration_secs': 0.290858} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1447.608692] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1447.609380] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1447.609540] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1447.609973] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1447.610233] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5f53b37-2a9c-44cf-bd3e-b79838b2bb7a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1447.614735] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for the task: (returnval){
[ 1447.614735] env[67424]:   value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528d852a-d911-8b49-5849-88c86c5af05e"
[ 1447.614735] env[67424]:   _type = "Task"
[ 1447.614735] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1447.627399] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528d852a-d911-8b49-5849-88c86c5af05e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1448.125536] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1448.125795] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1448.126012] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1452.387698] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1453.388632] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1453.388871] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1456.383398] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1456.386879] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1457.057561] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1457.057764] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1457.387652] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1457.387965] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.387326] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1458.400167] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1458.400598] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1458.400598] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1458.400745] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1458.401871] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aaa161-afc2-4c13-8db8-929879b9b8fc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1458.410891] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49521d23-0970-4241-b43c-ad3bd2f51fe6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1458.425357] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd0649a-92d9-4a78-bb82-e2129a19cb8e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1458.431982] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299da592-f661-4002-8aea-b4c723dde13e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1458.462837] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1458.463092] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1458.463248] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1458.561436] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.561594] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.561720] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.561842] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.561961] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.562126] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.562265] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.562400] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.562523] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.562676] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1458.580224] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.595632] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0894ecdf-ae55-4d68-b7e4-35c3e3eeb789 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.611953] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2530f124-5c5f-419c-b258-30d0f40e0f89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.623416] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 02b8ee24-437c-4da9-877d-cddb3b83c235 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.638066] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f053a34a-f058-4c46-a525-fd01de9f8f57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.649864] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.663720] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1458.663968] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1458.664131] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1458.663968] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1458.664131] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1458.920688] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574863a1-4ae7-4a76-8c42-40f9cd0a0021 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.929160] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d711df2-fc6d-4af8-9074-c78ba4a250ab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.963116] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240dae0b-a14c-4add-8698-19351999a0ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.971071] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0309a04c-b28f-456b-8784-c4a8947dca44 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.984019] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.995482] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1459.011753] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1459.011753] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.548s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.012410] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.633259] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.633505] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.655515] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "2cdeaa5b-d8ce-4083-9a3f-cd514d943143" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.655726] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "2cdeaa5b-d8ce-4083-9a3f-cd514d943143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.383639] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.406691] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.406895] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1462.407030] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1462.426411] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.426537] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.426725] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.426897] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427068] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427205] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427329] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427451] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427569] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427686] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1462.427803] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1475.514142] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.811664] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.043041] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.103582] env[67424]: WARNING oslo_vmware.rw_handles [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1491.103582] env[67424]: ERROR oslo_vmware.rw_handles [ 1491.104187] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1491.106066] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1491.106307] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Copying Virtual Disk [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/0ef227ca-145b-47b1-bc3a-e11ce5766746/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1491.106588] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1a98854-73b3-44a5-a0c2-8c4f18529c84 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.114453] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1491.114453] env[67424]: value = "task-3200032" [ 1491.114453] env[67424]: _type = "Task" [ 1491.114453] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.122052] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.625274] env[67424]: DEBUG oslo_vmware.exceptions [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1491.625563] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.626132] env[67424]: ERROR nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1491.626132] env[67424]: Faults: ['InvalidArgument'] [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Traceback (most recent call last): [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] yield resources [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self.driver.spawn(context, instance, image_meta, [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self._fetch_image_if_missing(context, vi) [ 1491.626132] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] image_cache(vi, tmp_image_ds_loc) [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] vm_util.copy_virtual_disk( [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] session._wait_for_task(vmdk_copy_task) [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return self.wait_for_task(task_ref) [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return evt.wait() [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] result = hub.switch() [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1491.626518] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return self.greenlet.switch() [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self.f(*self.args, **self.kw) [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] raise exceptions.translate_fault(task_info.error) [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Faults: ['InvalidArgument'] [ 1491.626915] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] [ 1491.626915] env[67424]: INFO nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Terminating instance [ 1491.628033] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.628236] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1491.628475] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edac1978-7d4a-4fe2-acd7-939779353efa {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.630650] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1491.630832] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1491.631550] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466ff43b-c9b7-4075-8f28-ab83d4bdd865 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.639390] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1491.639390] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4b32111-cc12-4e7f-aa08-c96a020ca898 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.640693] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1491.640866] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1491.641819] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-664e405b-af53-4387-aec2-046d3d75a8e8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.646645] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for the task: (returnval){ [ 1491.646645] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52e46d11-8f81-7f58-e00a-dcbc0da6bddd" [ 1491.646645] env[67424]: _type = "Task" [ 1491.646645] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.653904] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52e46d11-8f81-7f58-e00a-dcbc0da6bddd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.702211] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1491.702490] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1491.702772] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleting the datastore file [datastore2] 2489aa3d-1973-4ede-9cae-dab971fa4a7c {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1491.703054] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f44a23d6-cfb7-41ca-8bc9-4429a067e107 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.709224] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1491.709224] env[67424]: value = "task-3200034" [ 1491.709224] env[67424]: _type = "Task" [ 1491.709224] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.716686] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200034, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.156907] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1492.157297] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Creating directory with path [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.157553] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0aa05f86-c5cf-4a22-b6af-c8c70a6d8a8a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.168284] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Created directory with path [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.168466] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Fetch image to [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1492.168630] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1492.169330] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d9e638-ab66-461f-b111-a7521b177860 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.175523] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60328ee6-95bf-4600-be2d-2cc756ede9ee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.185093] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcedd15-ce2c-439e-895e-93073c7f559d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.216822] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393c2efc-6635-4856-99b0-693cc53dc32a 
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.223343] env[67424]: DEBUG oslo_vmware.api [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074595} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.224704] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1492.224889] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1492.225071] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1492.225246] env[67424]: INFO nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1492.226939] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a638369b-201c-47a0-b69d-9979c1496f7f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.228742] env[67424]: DEBUG nova.compute.claims [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1492.228913] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.229132] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.253025] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1492.305657] env[67424]: DEBUG oslo_vmware.rw_handles [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1492.368263] env[67424]: DEBUG oslo_vmware.rw_handles [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1492.368491] env[67424]: DEBUG oslo_vmware.rw_handles [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1492.555124] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3b6cf3-feb6-475c-aba6-b49193d82850 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.562916] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eeb8392-c739-4a6b-a22b-be00cf599ea0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.594248] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f184219-0567-4882-b5d2-50f4b51d5680 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.601342] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c77158f-446b-443e-b56b-273bd1920fda {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.614833] env[67424]: DEBUG nova.compute.provider_tree [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.623116] env[67424]: DEBUG nova.scheduler.client.report [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1492.637378] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.408s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.637876] env[67424]: ERROR nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1492.637876] env[67424]: Faults: ['InvalidArgument'] [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Traceback (most recent call last): [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1492.637876] env[67424]: ERROR nova.compute.manager 
[instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self.driver.spawn(context, instance, image_meta, [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self._fetch_image_if_missing(context, vi) [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] image_cache(vi, tmp_image_ds_loc) [ 1492.637876] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] vm_util.copy_virtual_disk( [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] session._wait_for_task(vmdk_copy_task) [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return self.wait_for_task(task_ref) [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return evt.wait() [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] result = hub.switch() [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] return self.greenlet.switch() [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1492.638251] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] self.f(*self.args, **self.kw) [ 1492.638581] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1492.638581] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] raise exceptions.translate_fault(task_info.error) [ 1492.638581] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1492.638581] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Faults: ['InvalidArgument'] [ 1492.638581] env[67424]: ERROR nova.compute.manager [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] [ 1492.638581] env[67424]: DEBUG nova.compute.utils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1492.639946] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Build of instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c was re-scheduled: A specified parameter was not correct: fileType [ 1492.639946] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1492.640333] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1492.640506] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1492.640743] env[67424]: DEBUG nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1492.640938] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1493.084504] env[67424]: DEBUG nova.network.neutron [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.099038] env[67424]: INFO nova.compute.manager [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Took 0.46 seconds to deallocate network for instance. [ 1493.195093] env[67424]: INFO nova.scheduler.client.report [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleted allocations for instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c [ 1493.218105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7ca049cb-20a0-4395-b19b-ef4c8a22f0df tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 591.395s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.219169] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 393.760s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.219391] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.219593] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.219760] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.221824] env[67424]: INFO nova.compute.manager [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Terminating instance [ 1493.223475] env[67424]: DEBUG nova.compute.manager [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1493.223898] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1493.224159] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39fc9c0a-2db6-45fc-b7d3-1d4dda9ef3ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.233703] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049c108b-6a1d-469d-a5b2-6c6f83bf6d49 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.244419] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1493.268530] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2489aa3d-1973-4ede-9cae-dab971fa4a7c could not be found. [ 1493.268820] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1493.269077] env[67424]: INFO nova.compute.manager [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1493.269410] env[67424]: DEBUG oslo.service.loopingcall [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.269709] env[67424]: DEBUG nova.compute.manager [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1493.269838] env[67424]: DEBUG nova.network.neutron [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1493.296953] env[67424]: DEBUG nova.network.neutron [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.301715] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.302035] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.304160] env[67424]: INFO nova.compute.claims [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1493.310107] env[67424]: INFO nova.compute.manager [-] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] Took 0.04 seconds to deallocate network for instance. [ 1493.391282] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53256502-ab8c-4fd8-bc85-78f6a61edfd5 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.392547] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 258.648s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.392932] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 2489aa3d-1973-4ede-9cae-dab971fa4a7c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1493.393197] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "2489aa3d-1973-4ede-9cae-dab971fa4a7c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.555031] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10af419-dc1f-49a4-9548-2a7d4fb7f516 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.562977] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effa898e-b720-4e43-9538-c2e02090d532 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.604754] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead025b4-5f4d-4dbc-bb59-ba3ed757814b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.612282] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f1ccd8-7c0d-49c1-848e-e8306d60ec21 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.627528] env[67424]: DEBUG nova.compute.provider_tree [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.642125] env[67424]: DEBUG nova.scheduler.client.report [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1493.660340] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.358s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.661195] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1493.695388] env[67424]: DEBUG nova.compute.utils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1493.697190] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1493.697511] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1493.709482] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1493.781023] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1493.786026] env[67424]: DEBUG nova.policy [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66fa16dd270942dba4b94fa0309729ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '689a0b14ff0b42ad99141803bcb23266', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1493.807545] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow 
threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1493.807849] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1493.808081] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1493.808316] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1493.808531] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1493.808696] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1493.808911] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1493.809146] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1493.809469] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1493.809681] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1493.809939] env[67424]: DEBUG nova.virt.hardware [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1493.811400] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec24da8-4674-4bfe-94a4-8e4dc9a1a297 
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.819901] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b131e79-7956-47e1-a476-f07b44ddfe29 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.416801] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Successfully created port: c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.125272] env[67424]: DEBUG nova.compute.manager [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Received event network-vif-plugged-c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1495.126023] env[67424]: DEBUG oslo_concurrency.lockutils [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] Acquiring lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.126593] env[67424]: DEBUG oslo_concurrency.lockutils [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.126862] env[67424]: DEBUG oslo_concurrency.lockutils [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.127187] env[67424]: DEBUG nova.compute.manager [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] No waiting events found dispatching network-vif-plugged-c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.127374] env[67424]: WARNING nova.compute.manager [req-55445edb-ffcd-433c-bf56-533ea6c86986 req-55bbe10c-0887-4e57-b5d1-858516b45cd7 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Received unexpected event network-vif-plugged-c15e82ef-ec60-4bad-af34-c2478cf0aecc for instance with vm_state building and task_state spawning. 
[ 1495.177422] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.227076] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Successfully updated port: c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.241057] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.241250] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.241384] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1495.306897] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1495.569269] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updating instance_info_cache with network_info: [{"id": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "address": "fa:16:3e:cf:ac:57", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc15e82ef-ec", "ovs_interfaceid": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.583988] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.584253] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance network_info: |[{"id": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "address": "fa:16:3e:cf:ac:57", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc15e82ef-ec", "ovs_interfaceid": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} 
[ 1495.584656] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:ac:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9bb629cd-6d0f-4bed-965c-bd04a2f3ec49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c15e82ef-ec60-4bad-af34-c2478cf0aecc', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.592155] env[67424]: DEBUG oslo.service.loopingcall [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.592993] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1495.592993] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3ae8c30-8b32-475f-bb47-4645a2463b73 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.613673] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.613673] env[67424]: value = "task-3200035" [ 1495.613673] env[67424]: _type = "Task" [ 1495.613673] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.621577] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200035, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.124365] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200035, 'name': CreateVM_Task, 'duration_secs': 0.281653} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.124573] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1496.125298] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.125487] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.125825] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1496.126113] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bfd4f86-5283-4e34-8029-86652ec0115e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.130512] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1496.130512] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5299fe0a-150e-91ad-3d6c-23ddf77bd4e6" [ 1496.130512] env[67424]: _type = "Task" [ 1496.130512] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.138263] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5299fe0a-150e-91ad-3d6c-23ddf77bd4e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.643364] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.643684] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.643885] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.280079] env[67424]: DEBUG nova.compute.manager [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Received event network-changed-c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1497.280288] env[67424]: DEBUG nova.compute.manager [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Refreshing instance network info cache due to event network-changed-c15e82ef-ec60-4bad-af34-c2478cf0aecc. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1497.280503] env[67424]: DEBUG oslo_concurrency.lockutils [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] Acquiring lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.280643] env[67424]: DEBUG oslo_concurrency.lockutils [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] Acquired lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.280809] env[67424]: DEBUG nova.network.neutron [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Refreshing network info cache for port c15e82ef-ec60-4bad-af34-c2478cf0aecc {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1497.541831] env[67424]: DEBUG nova.network.neutron [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updated VIF entry in instance network info cache for port c15e82ef-ec60-4bad-af34-c2478cf0aecc. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1497.542188] env[67424]: DEBUG nova.network.neutron [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updating instance_info_cache with network_info: [{"id": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "address": "fa:16:3e:cf:ac:57", "network": {"id": "f8e9d403-2dbb-40b5-8e1d-ea150b305c64", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-995680307-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689a0b14ff0b42ad99141803bcb23266", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9bb629cd-6d0f-4bed-965c-bd04a2f3ec49", "external-id": "nsx-vlan-transportzone-848", "segmentation_id": 848, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc15e82ef-ec", "ovs_interfaceid": "c15e82ef-ec60-4bad-af34-c2478cf0aecc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.551983] env[67424]: DEBUG oslo_concurrency.lockutils [req-aa91bfc4-db7b-498e-a5ab-9bcdeee35046 req-847699fd-224c-4de2-a5b8-d4341c9e9122 service nova] Releasing lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.592624] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.592992] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.031136] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d99cd2ac-50fd-4dbf-8075-6087ffb051f4 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "251ac24f-18d2-42e6-ba08-87ca676f9261" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.031383] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d99cd2ac-50fd-4dbf-8075-6087ffb051f4 tempest-DeleteServersTestJSON-1876160165 
tempest-DeleteServersTestJSON-1876160165-project-member] Lock "251ac24f-18d2-42e6-ba08-87ca676f9261" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.388453] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.398611] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.398934] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1511.397569] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1511.397766] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1511.409451] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1513.400834] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.387855] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.387997] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1516.383773] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1517.387679] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1517.929166] env[67424]: DEBUG oslo_concurrency.lockutils [None req-699f4f2a-d65b-41af-b511-d086df2379e2 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Acquiring lock "1ce795f5-58c4-4f28-9ae5-07c5dad82c2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.929401] env[67424]: DEBUG oslo_concurrency.lockutils [None req-699f4f2a-d65b-41af-b511-d086df2379e2 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "1ce795f5-58c4-4f28-9ae5-07c5dad82c2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.388224] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.388606] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1519.387933] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1519.402763] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.403233] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.403331] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.403558] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1519.405204] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01948806-2b54-4cdb-a8fa-e1e46000f106 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.416976] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b857771-ae56-4afd-a8ad-24a3999617c8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.439145] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04db671-e8f9-47e8-82b0-d9e95cf1fd71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.449173] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcac40c-31ea-45a5-9d82-66fa9b2d39b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.487046] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181008MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1519.487046] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.487046] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.680007] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680184] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680314] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680437] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680553] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680682] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680819] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.680938] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.681161] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.681405] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.694244] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f053a34a-f058-4c46-a525-fd01de9f8f57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.705032] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.717107] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.729878] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.741474] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2cdeaa5b-d8ce-4083-9a3f-cd514d943143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.754838] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.766303] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 251ac24f-18d2-42e6-ba08-87ca676f9261 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.777617] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.778922] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1519.778922] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1520.004132] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f461e2-9653-48a6-9f70-5004c2a4d115 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.011686] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07304f04-0ec8-4ba1-b831-85d25580d78a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.040501] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697130a6-13cb-4b7e-8cb1-0ae5b6a19941 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.047837] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3046c916-b059-4090-a087-3387f39e4cfd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.060641] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1520.069218] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1520.082011] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1520.082202] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.595s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.082139] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1524.388054] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1524.388054] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1524.388466] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1524.411860] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.411860] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.411860] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.412069] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413295] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413295] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413295] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413295] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413295] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413605] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1524.413605] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1533.293539] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9818767a-2b57-4ddf-a88d-ae28030a3135 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Acquiring lock "dc0e72cb-20af-4116-86a4-94b464272cff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.293810] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9818767a-2b57-4ddf-a88d-ae28030a3135 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "dc0e72cb-20af-4116-86a4-94b464272cff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.631828] env[67424]: WARNING oslo_vmware.rw_handles [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles
version, status, reason = self._read_status() [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1537.631828] env[67424]: ERROR oslo_vmware.rw_handles [ 1537.632576] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1537.634263] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1537.634535] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Copying Virtual Disk [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/a6e063d5-3b6b-496d-a114-054c8b27b3a2/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1537.634815] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dd3e1b0-7060-41cc-bb15-8a5b1ced7da3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.644843] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for the task: (returnval){ [ 1537.644843] env[67424]: value = "task-3200036" [ 1537.644843] env[67424]: _type = "Task" [ 1537.644843] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.652358] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Task: {'id': task-3200036, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.155368] env[67424]: DEBUG oslo_vmware.exceptions [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1538.155662] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.156216] env[67424]: ERROR nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1538.156216] env[67424]: Faults: ['InvalidArgument'] [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Traceback (most recent call last): [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] yield resources [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self.driver.spawn(context, instance, image_meta, [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self._fetch_image_if_missing(context, vi) [ 1538.156216] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] image_cache(vi, tmp_image_ds_loc) [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] vm_util.copy_virtual_disk( [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] session._wait_for_task(vmdk_copy_task) [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return self.wait_for_task(task_ref) [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return evt.wait() [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] result = hub.switch() [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1538.156573] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return self.greenlet.switch() [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self.f(*self.args, **self.kw) [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] raise exceptions.translate_fault(task_info.error) [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Faults: ['InvalidArgument'] [ 1538.156976] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] [ 1538.156976] env[67424]: INFO nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Terminating instance [ 1538.158196] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.158283] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1538.158977] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 
tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1538.159093] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1538.159329] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fdc71fd-c622-4a7c-ac2f-2bafc8e601a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.161905] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95742cc3-3320-4657-b17c-eac965b5abbc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.168614] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1538.168832] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7da0f78-9897-4ffb-a534-fc9df2022576 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.171032] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1538.171218] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1538.172189] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e1d1644-181b-4849-a4b0-33d1d74200ca {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.179023] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 1538.179023] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52fd4c29-89b8-1f82-26cb-14fecf8044ef" [ 1538.179023] env[67424]: _type = "Task" [ 1538.179023] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.191373] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1538.191618] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating directory with path [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1538.191830] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a40065d9-d275-494d-ad64-faa60ea5583c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.213493] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Created directory with path [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1538.213743] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Fetch image to [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1538.213932] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1538.214737] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85118301-7a12-4327-b717-32857900e3f3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.221378] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aef6dbf-1b48-48c3-ab42-7d62dee29632 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.230260] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2777d2-2502-4846-8676-b00aa622fa71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.261550] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-507bc620-fa2c-4ba2-b1b9-f786f76f19f1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.264226] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1538.264427] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1538.264602] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Deleting the datastore file [datastore2] 3e4e39f2-9267-4076-a302-d5210cb3d5ff {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1538.265455] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f562b50-f832-4ec4-be16-e183039888e7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.270639] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e909ad70-2c27-437e-9c5d-1fe6462ace29 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.272430] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for the task: (returnval){ [ 1538.272430] env[67424]: value = "task-3200038" [ 1538.272430] env[67424]: _type = "Task" [ 1538.272430] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.280848] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Task: {'id': task-3200038, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.292060] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1538.345122] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1538.407910] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1538.408227] env[67424]: DEBUG oslo_vmware.rw_handles [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1538.782167] env[67424]: DEBUG oslo_vmware.api [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Task: {'id': task-3200038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0653} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.782521] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1538.782581] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1538.782848] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1538.782944] env[67424]: INFO nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1538.785049] env[67424]: DEBUG nova.compute.claims [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1538.785224] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.785435] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.027599] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92726b80-f7ad-4609-bdb9-0592df7d4740 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.035076] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a5c14b-95a5-4cc9-be5b-3602d38b7bd9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.064241] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fcc5d3-4a6c-4684-a7f5-4898e4d34dc6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.070975] env[67424]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107e7548-284a-4967-9f77-78b9cc72eb50 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.083779] env[67424]: DEBUG nova.compute.provider_tree [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.093205] env[67424]: DEBUG nova.scheduler.client.report [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1539.108051] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.108224] env[67424]: ERROR nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1539.108224] env[67424]: Faults: ['InvalidArgument'] [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Traceback (most recent call last): [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self.driver.spawn(context, instance, image_meta, [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self._fetch_image_if_missing(context, vi) [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1539.108224] 
env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] image_cache(vi, tmp_image_ds_loc) [ 1539.108224] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] vm_util.copy_virtual_disk( [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] session._wait_for_task(vmdk_copy_task) [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return self.wait_for_task(task_ref) [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return evt.wait() [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] result = hub.switch() [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] return self.greenlet.switch() [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1539.108580] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] self.f(*self.args, **self.kw) [ 1539.108931] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1539.108931] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] raise exceptions.translate_fault(task_info.error) [ 1539.108931] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1539.108931] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Faults: ['InvalidArgument'] [ 1539.108931] env[67424]: ERROR nova.compute.manager [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] [ 1539.109086] env[67424]: DEBUG nova.compute.utils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] VimFaultException {{(pid=67424) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1539.110502] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Build of instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff was re-scheduled: A specified parameter was not correct: fileType [ 1539.110502] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1539.110874] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1539.111058] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1539.111243] env[67424]: DEBUG nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1539.111427] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1539.751888] env[67424]: DEBUG nova.network.neutron [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.766973] env[67424]: INFO nova.compute.manager [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Took 0.66 seconds to deallocate network for instance.
[ 1539.857362] env[67424]: INFO nova.scheduler.client.report [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Deleted allocations for instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff [ 1539.880036] env[67424]: DEBUG oslo_concurrency.lockutils [None req-96882666-e839-4327-a418-8de9d3147887 tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 636.735s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.880927] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 440.350s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.881460] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Acquiring lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.881460] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.881656] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.883974] env[67424]: INFO nova.compute.manager [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Terminating instance [ 1539.885675] env[67424]: DEBUG nova.compute.manager [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Start destroying the instance on the hypervisor.
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1539.885911] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1539.886414] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6af8ab5c-0e2f-4514-ba3d-acccc3ccdfbe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.896016] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6b6bd6-440b-4487-9d00-33ef7f7c270e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.906770] env[67424]: DEBUG nova.compute.manager [None req-a16a39c7-777e-4325-bcb9-1faa7fdc0f7d tempest-ServerActionsTestOtherA-1713597554 tempest-ServerActionsTestOtherA-1713597554-project-member] [instance: 0894ecdf-ae55-4d68-b7e4-35c3e3eeb789] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1539.928164] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3e4e39f2-9267-4076-a302-d5210cb3d5ff could not be found. [ 1539.928373] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1539.928545] env[67424]: INFO nova.compute.manager [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1539.928786] env[67424]: DEBUG oslo.service.loopingcall [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.929032] env[67424]: DEBUG nova.compute.manager [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1539.929133] env[67424]: DEBUG nova.network.neutron [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1539.931430] env[67424]: DEBUG nova.compute.manager [None req-a16a39c7-777e-4325-bcb9-1faa7fdc0f7d tempest-ServerActionsTestOtherA-1713597554 tempest-ServerActionsTestOtherA-1713597554-project-member] [instance: 0894ecdf-ae55-4d68-b7e4-35c3e3eeb789] Instance disappeared before build.
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1539.953441] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a16a39c7-777e-4325-bcb9-1faa7fdc0f7d tempest-ServerActionsTestOtherA-1713597554 tempest-ServerActionsTestOtherA-1713597554-project-member] Lock "0894ecdf-ae55-4d68-b7e4-35c3e3eeb789" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.329s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.957933] env[67424]: DEBUG nova.network.neutron [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.961789] env[67424]: DEBUG nova.compute.manager [None req-53b6c4ad-314d-486c-9280-3db96c269d82 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 2530f124-5c5f-419c-b258-30d0f40e0f89] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1539.965340] env[67424]: INFO nova.compute.manager [-] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] Took 0.04 seconds to deallocate network for instance. [ 1539.985623] env[67424]: DEBUG nova.compute.manager [None req-53b6c4ad-314d-486c-9280-3db96c269d82 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 2530f124-5c5f-419c-b258-30d0f40e0f89] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1540.004470] env[67424]: DEBUG oslo_concurrency.lockutils [None req-53b6c4ad-314d-486c-9280-3db96c269d82 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "2530f124-5c5f-419c-b258-30d0f40e0f89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 232.201s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.019096] env[67424]: DEBUG nova.compute.manager [None req-17e1cefc-c0cc-49e3-996d-018e087b9770 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 02b8ee24-437c-4da9-877d-cddb3b83c235] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1540.066029] env[67424]: DEBUG nova.compute.manager [None req-17e1cefc-c0cc-49e3-996d-018e087b9770 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 02b8ee24-437c-4da9-877d-cddb3b83c235] Instance disappeared before build.
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1540.069622] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c5521ae5-9109-4538-b7fc-9dcacb0e710a tempest-InstanceActionsTestJSON-99928645 tempest-InstanceActionsTestJSON-99928645-project-member] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.189s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.070430] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 305.326s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.070614] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 3e4e39f2-9267-4076-a302-d5210cb3d5ff] During sync_power_state the instance has a pending task (deleting). Skip. [ 1540.070787] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "3e4e39f2-9267-4076-a302-d5210cb3d5ff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.085628] env[67424]: DEBUG oslo_concurrency.lockutils [None req-17e1cefc-c0cc-49e3-996d-018e087b9770 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "02b8ee24-437c-4da9-877d-cddb3b83c235" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 219.993s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.093907] env[67424]: DEBUG nova.compute.manager [None req-4116437c-22d8-4b27-bf9e-5fa0e14d0bf2 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: f053a34a-f058-4c46-a525-fd01de9f8f57] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1540.115990] env[67424]: DEBUG nova.compute.manager [None req-4116437c-22d8-4b27-bf9e-5fa0e14d0bf2 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: f053a34a-f058-4c46-a525-fd01de9f8f57] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1540.136385] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4116437c-22d8-4b27-bf9e-5fa0e14d0bf2 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "f053a34a-f058-4c46-a525-fd01de9f8f57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.406s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.156153] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1540.204316] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.204316] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.205753] env[67424]: INFO nova.compute.claims [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1540.448119] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b79aab3-cf95-4c53-803a-f17d6b56a856 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.455732] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd2470b-50b9-4674-aeba-9519f782ec58 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.485653] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f74b3e-e7c4-4811-9632-c76cc9e02733 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.492819] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71e75d3-897e-427f-b5b4-8bb1579e0bde {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.505787] env[67424]: DEBUG nova.compute.provider_tree [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.514289] env[67424]: DEBUG nova.scheduler.client.report [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1540.528021] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 
tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.324s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.528420] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1540.559885] env[67424]: DEBUG nova.compute.utils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.561330] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Not allocating networking since 'none' was specified. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1540.569665] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1540.627878] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1540.652978] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1540.653236] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1540.653393] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1540.653644] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1540.653814] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1540.653968] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1540.654192] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1540.654352] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1540.654760] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 
tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1540.654992] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1540.655206] env[67424]: DEBUG nova.virt.hardware [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1540.658950] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3548eb42-cf0e-40f2-a932-cf6c2af0c7d6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.664771] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d6367d-0eef-4dd8-a83d-621624cceaed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.679186] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance VIF info [] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1540.685275] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Creating folder: Project (436b3e65082740d7989bdddf2cdd4467). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1540.685553] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c19ff12a-a7a4-4b7c-8c98-ce35fb1abccf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.695683] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Created folder: Project (436b3e65082740d7989bdddf2cdd4467) in parent group-v639843. [ 1540.695842] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Creating folder: Instances. Parent ref: group-v639928. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1540.696398] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d6faad1-0d74-46a6-b407-b4ee84322e7d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.704580] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Created folder: Instances in parent group-v639928. 
[ 1540.704819] env[67424]: DEBUG oslo.service.loopingcall [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.705011] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1540.705208] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e88f6a6-3aa8-424b-99dd-9d7711b7969e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.721597] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1540.721597] env[67424]: value = "task-3200041" [ 1540.721597] env[67424]: _type = "Task" [ 1540.721597] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.729083] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200041, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.231453] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200041, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.732672] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200041, 'name': CreateVM_Task} progress is 99%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.234064] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200041, 'name': CreateVM_Task, 'duration_secs': 1.242537} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.234064] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1542.234064] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.234064] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.234536] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1542.234645] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99448791-5afc-4a79-a0db-e8f3e5d4eee4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.238805] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for the task: (returnval){ [ 1542.238805] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]526b6e14-df06-6815-cb7f-567c9f1c8194" [ 1542.238805] env[67424]: _type = "Task" [ 1542.238805] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.245984] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]526b6e14-df06-6815-cb7f-567c9f1c8194, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.749233] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.749578] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1542.749832] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.116204] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "afd0f239-0752-4e2e-a232-9f22722753f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.387544] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.387874] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1575.389631] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.387616] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.383598] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1579.387635] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.388011] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.388011] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1580.400450] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.400689] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.400860] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.401029] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1580.402217] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e0ba88-4e24-4244-8408-b4ed67b104ce {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.412668] env[67424]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc4da4c-22a6-4111-b784-cb080073c3ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.426977] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601a2ee1-d575-40a1-9c71-5844f9080371 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.433361] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815ee2ce-913a-4d9c-a5d2-fd565c8b76b6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.462058] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1580.462058] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.462058] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.539437] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.539599] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.539725] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.539844] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.539961] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.540083] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.540197] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.540308] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.540420] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.540529] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1580.551486] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.561421] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.571222] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2cdeaa5b-d8ce-4083-9a3f-cd514d943143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.580849] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.589563] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 251ac24f-18d2-42e6-ba08-87ca676f9261 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.598348] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.607468] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc0e72cb-20af-4116-86a4-94b464272cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.607701] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1580.607844] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1580.812902] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a590e721-55b2-4980-930c-5435836e490d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.821919] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533695a3-9a4f-49a0-8a22-e477c5a1e7d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.853832] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087c0b34-d24a-4379-9009-96d971f5e7b4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.861114] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa44401-006c-41f7-bbe3-01b862fbd9b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.874180] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.883626] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1580.898961] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1580.899187] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.437s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.900610] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1583.897195] env[67424]: WARNING oslo_vmware.rw_handles [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1583.897195] env[67424]: ERROR oslo_vmware.rw_handles [ 1583.897632] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1583.899941] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1583.900233] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Copying Virtual Disk [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/d1d61980-907f-46f4-8ad7-98bc38eac3e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1583.900584] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71e3ef7c-b648-48fb-b54b-cb353416d028 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.908223] env[67424]: DEBUG 
oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 1583.908223] env[67424]: value = "task-3200042" [ 1583.908223] env[67424]: _type = "Task" [ 1583.908223] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.915808] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3200042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.225540] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.225540] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1586.225540] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1586.238577] env[67424]: DEBUG oslo_vmware.exceptions [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1586.238930] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.239433] env[67424]: ERROR nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1586.239433] env[67424]: Faults: ['InvalidArgument'] [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] yield resources [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.driver.spawn(context, instance, image_meta, [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._fetch_image_if_missing(context, vi) [ 1586.239433] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] image_cache(vi, tmp_image_ds_loc) [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] vm_util.copy_virtual_disk( [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] session._wait_for_task(vmdk_copy_task) [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.wait_for_task(task_ref) [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return evt.wait() [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = hub.switch() [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1586.239804] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.greenlet.switch() [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.f(*self.args, **self.kw) [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exceptions.translate_fault(task_info.error) [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Faults: ['InvalidArgument'] [ 1586.240359] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1586.240359] env[67424]: INFO nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Terminating instance [ 1586.241795] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.241884] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.242491] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 
tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1586.242809] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1586.242900] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bd9fe49-db5c-4687-b799-6558e9689a10 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.245278] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155fa6f7-da53-461a-879d-36be9bd4d168 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.250425] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.250570] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.250699] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.250825] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.250945] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251076] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251196] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251314] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251431] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251545] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1586.251664] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1586.256703] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1586.256927] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42b7c4f8-cf54-429e-b76f-3324454c4d7d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.264589] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.264755] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1586.265468] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4e4665d-8056-49de-a6f6-56784a23bc43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.272348] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for the task: (returnval){ [ 1586.272348] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]520e2710-434d-9d3b-337e-0313f74bad63" [ 1586.272348] env[67424]: _type = "Task" [ 1586.272348] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.279936] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]520e2710-434d-9d3b-337e-0313f74bad63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.782624] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1586.782890] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Creating directory with path [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.783170] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9bfecb6-ff60-4636-a00a-c602814276f6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.806631] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Created directory with path [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.806824] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Fetch image to [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1586.806982] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1586.807714] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12464c4-a6b3-4260-b8a9-b162e23e18a0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.814246] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726fb5e7-dab3-4d05-9da4-108644345a06 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.823144] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8083482-2792-4a03-a3cc-8c0ae4bd274e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.853512] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1207e041-aca9-433e-a14b-e3ac96d83eb4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.858921] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3eb5f793-4053-45fd-8206-ec31e215bf8d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.879424] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1587.041087] env[67424]: DEBUG oslo_vmware.rw_handles [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1587.101473] env[67424]: DEBUG oslo_vmware.rw_handles [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1587.101673] env[67424]: DEBUG oslo_vmware.rw_handles [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1587.411231] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.686558] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1587.686861] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1587.686970] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleting the datastore file [datastore2] 4e370013-5dfb-467c-8709-c0a0b256a9aa {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1587.687263] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b567e85-695d-4983-a1ad-fb232fda9932 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.693076] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for the task: (returnval){ [ 1587.693076] env[67424]: value = "task-3200044" [ 1587.693076] env[67424]: _type = "Task" [ 1587.693076] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.700840] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3200044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.204640] env[67424]: DEBUG oslo_vmware.api [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Task: {'id': task-3200044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079815} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.204894] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1588.205090] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1588.205286] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1588.205482] env[67424]: INFO nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Took 1.96 seconds to destroy the instance on the hypervisor. [ 1588.207725] env[67424]: DEBUG nova.compute.claims [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1588.207900] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.208134] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.451405] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf15926-1dd8-4037-b5a2-271b82c82f82 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.459224] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ea5302-dd20-461b-8278-223de6148566 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.489198] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4102a91-6000-4c53-b888-8e9c0e89c48a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.496506] env[67424]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b076f453-5452-4515-95b0-6c5cee0d65f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.509328] env[67424]: DEBUG nova.compute.provider_tree [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1588.518429] env[67424]: DEBUG nova.scheduler.client.report [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1588.531633] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.323s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.532140] env[67424]: ERROR nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1588.532140] env[67424]: Faults: ['InvalidArgument'] [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.driver.spawn(context, instance, image_meta, [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._fetch_image_if_missing(context, vi) [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] image_cache(vi, tmp_image_ds_loc) [ 1588.532140] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] vm_util.copy_virtual_disk( [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] session._wait_for_task(vmdk_copy_task) [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.wait_for_task(task_ref) [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return evt.wait() [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = hub.switch() [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.greenlet.switch() [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1588.532577] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.f(*self.args, **self.kw) [ 1588.532957] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1588.532957] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exceptions.translate_fault(task_info.error) [ 1588.532957] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1588.532957] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Faults: ['InvalidArgument'] [ 1588.532957] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.532957] env[67424]: DEBUG nova.compute.utils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 
4e370013-5dfb-467c-8709-c0a0b256a9aa] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1588.534123] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Build of instance 4e370013-5dfb-467c-8709-c0a0b256a9aa was re-scheduled: A specified parameter was not correct: fileType [ 1588.534123] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1588.534541] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1588.534708] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1588.534859] env[67424]: DEBUG nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1588.535030] env[67424]: DEBUG nova.network.neutron [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1588.673903] env[67424]: DEBUG neutronclient.v2_0.client [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67424) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1588.676007] env[67424]: ERROR nova.compute.manager [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
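The spawn failure above originates in vm_util.copy_virtual_disk(): oslo.vmware polls the disk-copy task and, when the task ends in error, translate_fault() turns the vCenter "InvalidArgument: fileType" fault into the VimFaultException shown in the traceback. A minimal sketch of that poll-and-translate loop, assuming only the oslo.vmware public API (the function name and interval are illustrative; the shipped wait_for_task() drives the same logic from a looping call rather than sleep()):

    # Illustrative sketch, not the shipped oslo.vmware code.
    import time

    from oslo_vmware import exceptions, vim_util


    def wait_for_task_sketch(session, task_ref, interval=0.5):
        """Poll a vCenter TaskInfo until it leaves the running states."""
        while True:
            task_info = session.invoke_api(vim_util, 'get_object_property',
                                           session.vim, task_ref, 'info')
            if task_info.state == 'success':
                return task_info
            if task_info.state == 'error':
                # This is where the 'InvalidArgument: fileType' fault in
                # the traceback above is raised as a VimFaultException.
                raise exceptions.translate_fault(task_info.error)
            time.sleep(interval)  # still 'queued' or 'running'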
[ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.driver.spawn(context, instance, image_meta, [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._fetch_image_if_missing(context, vi) [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] image_cache(vi, tmp_image_ds_loc) [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1588.676007] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] vm_util.copy_virtual_disk( [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] session._wait_for_task(vmdk_copy_task) [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.wait_for_task(task_ref) [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return evt.wait() [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = hub.switch() [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.greenlet.switch() [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.f(*self.args, **self.kw) [ 1588.676397] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exceptions.translate_fault(task_info.error) [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Faults: ['InvalidArgument'] [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] During handling of the above exception, another exception occurred: [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._build_and_run_instance(context, instance, image, [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exception.RescheduledException( [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] nova.exception.RescheduledException: Build of instance 4e370013-5dfb-467c-8709-c0a0b256a9aa was re-scheduled: A specified parameter was not correct: fileType [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Faults: ['InvalidArgument'] [ 1588.676823] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] During handling of the above exception, another exception occurred: [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 
1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] exception_handler_v20(status_code, error_body) [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise client_exc(message=error_message, [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Neutron server returns request_ids: ['req-bf69b06c-1c89-4e4a-96ff-8856b118b931'] [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.677241] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] During handling of the above exception, another exception occurred: [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._deallocate_network(context, instance, requested_networks) [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.network_api.deallocate_for_instance( [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] data = neutron.list_ports(**search_opts) [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.list('ports', self.ports_path, retrieve_all, [ 1588.677701] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 
4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] for r in self._pagination(collection, path, **params): [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] res = self.get(path, params=params) [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.retry_request("GET", action, body=body, [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1588.678070] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.do_request(method, action, body=body, [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._handle_fault_response(status_code, replybody, resp) [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exception.Unauthorized() [ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] nova.exception.Unauthorized: Not authorized. 
[ 1588.678388] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.744213] env[67424]: INFO nova.scheduler.client.report [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Deleted allocations for instance 4e370013-5dfb-467c-8709-c0a0b256a9aa [ 1588.769627] env[67424]: DEBUG oslo_concurrency.lockutils [None req-7b4c7c73-d1a1-473a-9ce9-e668374c2b35 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 682.372s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.770531] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 485.735s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.770757] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Acquiring lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.770963] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.771186] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.773147] env[67424]: INFO nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Terminating instance [ 1588.774883] env[67424]: DEBUG nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1588.775172] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1588.775541] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce759624-418b-43c5-9b24-8db6dd32dd8e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.779840] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1588.786397] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ac5cc9-c74a-4f26-8589-00818d143a21 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.817224] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4e370013-5dfb-467c-8709-c0a0b256a9aa could not be found. [ 1588.817224] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1588.817224] env[67424]: INFO nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1588.817224] env[67424]: DEBUG oslo.service.loopingcall [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.817469] env[67424]: DEBUG nova.compute.manager [-] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1588.817564] env[67424]: DEBUG nova.network.neutron [-] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1588.835417] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.835660] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.837169] env[67424]: INFO nova.compute.claims [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1588.938741] env[67424]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67424) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1588.939064] env[67424]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
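The 401 above is converted by the wrapper frames visible at nova/network/neutron.py (lines 196 and 212) in these tracebacks: a neutronclient Unauthorized becomes NeutronAdminCredentialConfigurationInvalid when the admin client cannot authenticate. A rough sketch of that translation, with illustrative names and signature (the real wrapper also covers user-token failures, which is where the earlier nova.exception.Unauthorized came from):

    # Illustrative sketch of the 401-translation wrapper, not Nova source.
    import functools

    from neutronclient.common import exceptions as neutron_client_exc

    from nova import exception


    def translate_401(func, is_admin_client):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_client_exc.Unauthorized:
                if is_admin_client:
                    # Admin token rejected: the nova.conf credential error
                    # logged above.
                    raise exception.NeutronAdminCredentialConfigurationInvalid()
                raise exception.Unauthorized()
        return wrapper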
[ 1588.939777] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-10f226a6-6074-4582-ae2d-502df96b37b4'] [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1588.939777] env[67424]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.940201] env[67424]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1588.940201] env[67424]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1588.940704] env[67424]: ERROR oslo.service.loopingcall [ 1588.941159] env[67424]: ERROR nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1588.971408] env[67424]: ERROR nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
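The vm_state flip to ERROR happens inside excutils.save_and_reraise_exception(), whose __exit__/force_reraise frames recur in the traceback that follows: the cleanup body runs first, then the original exception is re-raised unchanged. A minimal usage sketch, with invented helper names standing in for Nova's cleanup:

    # Minimal sketch of the save_and_reraise pattern seen in the frames.
    from oslo_utils import excutils


    def try_deallocate_sketch(deallocate, mark_instance_error):
        try:
            deallocate()  # raises, e.g., the neutron credential error
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs before the original exception is re-raised; this is
                # the point where the instance is pushed to ERROR above.
                mark_instance_error()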
[ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] exception_handler_v20(status_code, error_body) [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise client_exc(message=error_message, [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1588.971408] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Neutron server returns request_ids: ['req-10f226a6-6074-4582-ae2d-502df96b37b4'] [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] During handling of the above exception, another exception occurred: [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Traceback (most recent call last): [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._delete_instance(context, instance, bdms) [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._shutdown_instance(context, instance, bdms) [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._try_deallocate_network(context, instance, requested_networks) [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] with excutils.save_and_reraise_exception(): [ 1588.971717] env[67424]: ERROR 
nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1588.971717] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.force_reraise() [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise self.value [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] _deallocate_network_with_retries() [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return evt.wait() [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = hub.switch() [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.greenlet.switch() [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1588.972194] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = func(*self.args, **self.kw) [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] result = f(*args, **kwargs) [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._deallocate_network( [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self.network_api.deallocate_for_instance( [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 
4e370013-5dfb-467c-8709-c0a0b256a9aa] data = neutron.list_ports(**search_opts) [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.list('ports', self.ports_path, retrieve_all, [ 1588.972544] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] for r in self._pagination(collection, path, **params): [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] res = self.get(path, params=params) [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.retry_request("GET", action, body=body, [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1588.972955] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] return self.do_request(method, action, body=body, [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] ret = obj(*args, **kwargs) [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] self._handle_fault_response(status_code, replybody, resp) [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1588.973321] env[67424]: ERROR nova.compute.manager [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] [ 1589.002343] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.230s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.002343] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 354.257s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.002343] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] During sync_power_state the instance has a pending task (deleting). Skip. [ 1589.002654] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "4e370013-5dfb-467c-8709-c0a0b256a9aa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.054371] env[67424]: INFO nova.compute.manager [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] [instance: 4e370013-5dfb-467c-8709-c0a0b256a9aa] Successfully reverted task state from None on failure for instance. [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server [None req-d2b5f71f-7107-4e2e-9487-02f622a5de84 tempest-DeleteServersAdminTestJSON-1997572119 tempest-DeleteServersAdminTestJSON-1997572119-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-10f226a6-6074-4582-ae2d-502df96b37b4'] [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1589.059139] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1589.059612] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1589.060131] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1589.060616] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1589.061099] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.061653] env[67424]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1589.061653] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
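The traceback ending here shows the shape of nova's neutron client wrapper (the repeated wrapper frames in nova/network/neutron.py at lines 196 and 212): every neutronclient call is wrapped, and a 401 Unauthorized from the configured service credentials is re-raised as NeutronAdminCredentialConfigurationInvalid, i.e. treated as a deployment misconfiguration rather than a transient API error. A minimal sketch of that decorator pattern, with simplified stand-in exception classes rather than Nova's actual ones:

import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class raised in the traceback."""

def reraise_unauthorized(func):
    # Sketch of the wrapper pattern: a 401 under the service credentials
    # is a configuration problem, so surface it as one.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@reraise_unauthorized
def list_ports(**search_opts):
    raise Unauthorized("The request you have made requires authentication.")

try:
    list_ports(device_id="4e370013-5dfb-467c-8709-c0a0b256a9aa")  # uuid from the log
except NeutronAdminCredentialConfigurationInvalid as exc:
    print("translated:", type(exc).__name__)

Calling list_ports() here raises the configuration error, matching the chained exceptions in the dump above.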
[ 1589.062156] env[67424]: ERROR oslo_messaging.rpc.server [ 1589.100265] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16de64be-109f-4899-a360-630bd9e53c97 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.108333] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8f2971-1d64-4b59-bcf9-e78e27794cb1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.139926] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de776fee-8996-4139-b0e6-29dc0c868e8f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.147064] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e40a9f-121b-4056-9497-48caa99fb5a0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.160144] env[67424]: DEBUG nova.compute.provider_tree [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.169688] env[67424]: DEBUG nova.scheduler.client.report [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1589.184437] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.184960] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Start building networks asynchronously for instance. 
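The inventory report above carries the numbers the scheduler actually works with; Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. A quick check of the figures from the log record above (values copied verbatim):

# Inventory for provider b21acede-6243-4c82-934a-a3956380220f, as logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, int(capacity))
# VCPU 192
# MEMORY_MB 196078
# DISK_GB 400

The min_unit/max_unit/step_size fields from the same record bound what a single allocation may consume: e.g. max_unit=16 caps one instance at 16 vCPUs even though 192 are schedulable in total.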
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1589.216291] env[67424]: DEBUG nova.compute.utils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1589.217767] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1589.217940] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1589.225921] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1589.275655] env[67424]: DEBUG nova.policy [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd1cd231b6964e58a1f818ce97b810aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9687dd492fad4c8cabec102278f19545', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1589.292402] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1589.317815] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1589.318062] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1589.318227] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.318407] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1589.318553] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.318698] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1589.318904] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1589.319075] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1589.319246] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1589.319408] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1589.319582] env[67424]: DEBUG nova.virt.hardware [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1589.320461] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c88eb7a-fb74-46e5-9f09-d999494c2a2f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.328429] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19641a82-6664-4381-9b3b-72ec820b6d43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.596533] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Successfully created port: d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.602080] env[67424]: DEBUG nova.compute.manager [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Received event network-vif-plugged-d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1590.602408] env[67424]: DEBUG oslo_concurrency.lockutils [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] Acquiring lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.602568] env[67424]: DEBUG oslo_concurrency.lockutils [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.602736] env[67424]: DEBUG oslo_concurrency.lockutils [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.602903] env[67424]: DEBUG nova.compute.manager [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] No waiting events found dispatching network-vif-plugged-d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1590.604392] env[67424]: WARNING nova.compute.manager [req-208e6159-4b97-4f6d-9375-33023dac13e9 req-eaf5f653-31a1-496b-9a91-c4750acc57d3 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Received unexpected event network-vif-plugged-d46b6b84-172e-4bac-8a9d-f7891723d1a0 for instance with vm_state building and task_state spawning. [ 1590.692362] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Successfully updated port: d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1590.703614] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.703761] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquired lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.703908] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1590.740987] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1590.897524] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Updating instance_info_cache with network_info: [{"id": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "address": "fa:16:3e:8b:89:35", "network": {"id": "cd43897a-f745-4646-a80d-3c2abc775903", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-873319255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9687dd492fad4c8cabec102278f19545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46b6b84-17", "ovs_interfaceid": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.911512] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Releasing lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.911801] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance network_info: |[{"id": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "address": "fa:16:3e:8b:89:35", "network": {"id": "cd43897a-f745-4646-a80d-3c2abc775903", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-873319255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9687dd492fad4c8cabec102278f19545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46b6b84-17", "ovs_interfaceid": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1590.912249] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:89:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c6a4836-66dc-4e43-982b-f8fcd3f9989a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd46b6b84-172e-4bac-8a9d-f7891723d1a0', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1590.919943] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Creating folder: Project (9687dd492fad4c8cabec102278f19545). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1590.920459] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d0a01c5-486d-41cc-bbfb-53a5ac9c976c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.931933] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Created folder: Project (9687dd492fad4c8cabec102278f19545) in parent group-v639843. [ 1590.932124] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Creating folder: Instances. Parent ref: group-v639931. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1590.932359] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28bccea6-11a4-4cbe-bea6-fe591ee7f13d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.940454] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Created folder: Instances in parent group-v639931. [ 1590.940684] env[67424]: DEBUG oslo.service.loopingcall [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1590.940858] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1590.941065] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ede6d14b-8136-46f2-acbe-9581a00889ce {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.959758] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1590.959758] env[67424]: value = "task-3200047" [ 1590.959758] env[67424]: _type = "Task" [ 1590.959758] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.966759] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200047, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.470030] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200047, 'name': CreateVM_Task, 'duration_secs': 0.326555} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.470209] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1591.470847] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.471026] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.471383] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1591.471636] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8031d0e8-5ebd-454b-b3d7-96b719a011a0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.475996] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for the task: (returnval){ [ 1591.475996] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5260962e-7d34-9f19-cdfb-acd06b81d67a" [ 1591.475996] env[67424]: _type = 
"Task" [ 1591.475996] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.483205] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5260962e-7d34-9f19-cdfb-acd06b81d67a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.986237] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.986651] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1591.986651] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.639047] env[67424]: DEBUG nova.compute.manager [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Received event network-changed-d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1592.639233] env[67424]: DEBUG nova.compute.manager [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Refreshing instance network info cache due to event network-changed-d46b6b84-172e-4bac-8a9d-f7891723d1a0. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1592.639439] env[67424]: DEBUG oslo_concurrency.lockutils [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] Acquiring lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.639581] env[67424]: DEBUG oslo_concurrency.lockutils [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] Acquired lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.639738] env[67424]: DEBUG nova.network.neutron [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Refreshing network info cache for port d46b6b84-172e-4bac-8a9d-f7891723d1a0 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1592.926761] env[67424]: DEBUG nova.network.neutron [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Updated VIF entry in instance network info cache for port d46b6b84-172e-4bac-8a9d-f7891723d1a0. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1592.927124] env[67424]: DEBUG nova.network.neutron [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Updating instance_info_cache with network_info: [{"id": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "address": "fa:16:3e:8b:89:35", "network": {"id": "cd43897a-f745-4646-a80d-3c2abc775903", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-873319255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9687dd492fad4c8cabec102278f19545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd46b6b84-17", "ovs_interfaceid": "d46b6b84-172e-4bac-8a9d-f7891723d1a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.937383] env[67424]: DEBUG oslo_concurrency.lockutils [req-0d01aa8c-0b24-4227-b9fa-14f2b89eebd5 req-baab7e39-56c5-4b9d-b3af-152b014287b7 service nova] Releasing lock "refresh_cache-72d5f322-47e3-402e-abcc-1b5b0497bc1f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.362271] env[67424]: WARNING oslo_vmware.rw_handles [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 
tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1633.362271] env[67424]: ERROR oslo_vmware.rw_handles [ 1633.362271] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1633.364736] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1633.365114] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Copying Virtual Disk [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/a012ccb5-abb1-4a5c-92a3-c8078f7609d8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1633.365474] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de5ac16e-9e9b-45de-a952-a1f36de31ef9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.374783] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for the task: (returnval){ [ 1633.374783] env[67424]: value = "task-3200048" [ 1633.374783] env[67424]: _type = "Task" [ 1633.374783] env[67424]: } to complete. 
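The rw_handles WARNING above is noise from closing the image-transfer connection: close() attempts to read a final HTTP response and the far end has already hung up, yet the image data was fully written (the very next record reports the download completed). A tolerant close, sketched with the standard library, under the assumption that the transfer itself was already accounted for:

import http.client

def close_quietly(conn):
    """Tolerate a server that drops the link before answering the final
    read, as in the rw_handles WARNING above (sketch)."""
    try:
        resp = conn.getresponse()
        resp.read()
    except http.client.RemoteDisconnected:
        pass  # data already transferred; treat the missing response as benign
    finally:
        conn.close()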
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.383446] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Task: {'id': task-3200048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.884987] env[67424]: DEBUG oslo_vmware.exceptions [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1633.885312] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.885871] env[67424]: ERROR nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1633.885871] env[67424]: Faults: ['InvalidArgument'] [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Traceback (most recent call last): [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] yield resources [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self.driver.spawn(context, instance, image_meta, [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self._fetch_image_if_missing(context, vi) [ 1633.885871] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] image_cache(vi, tmp_image_ds_loc) [ 1633.886299] env[67424]: ERROR 
nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] vm_util.copy_virtual_disk( [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] session._wait_for_task(vmdk_copy_task) [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return self.wait_for_task(task_ref) [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return evt.wait() [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] result = hub.switch() [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1633.886299] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return self.greenlet.switch() [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self.f(*self.args, **self.kw) [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] raise exceptions.translate_fault(task_info.error) [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Faults: ['InvalidArgument'] [ 1633.886740] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] [ 1633.886740] env[67424]: INFO nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Terminating instance [ 1633.887825] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.888067] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.888315] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b18911a-f123-4592-b80a-55e5a36363a8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.891754] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1633.891954] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1633.892693] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec73825-1c23-4249-ae25-22b24ff869a6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.899675] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1633.899887] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b4eb598-23fb-43e1-9835-146e425373f3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.902029] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.902213] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1633.903438] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a36e5a9-e9f1-41c5-a0f6-91a0d6208d4d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.907854] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for the task: (returnval){ [ 1633.907854] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52da30b0-cac8-bb91-0422-f7c38c7d802d" [ 1633.907854] env[67424]: _type = "Task" [ 1633.907854] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.914614] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52da30b0-cac8-bb91-0422-f7c38c7d802d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.975415] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1633.975648] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1633.975846] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Deleting the datastore file [datastore2] aa454838-2a3f-40a0-825d-1b3f2656a6a5 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1633.976162] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c897749-223d-434e-9ef3-1c590bf8db3e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.982343] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for the task: (returnval){ [ 1633.982343] env[67424]: value = "task-3200050" [ 1633.982343] env[67424]: _type = "Task" [ 1633.982343] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.991149] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Task: {'id': task-3200050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.419800] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1634.420090] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Creating directory with path [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1634.420325] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e9abb55-83be-4507-8aea-aade9011ce62 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.431137] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Created directory with path [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1634.431336] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Fetch image to [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1634.431518] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1634.432274] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722c6afc-efc7-43fb-937e-c5253c24d13b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.438917] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355409ca-3835-4988-95e3-91cf2265b154 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.447800] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76068827-62cc-4f65-b759-916fbaf155b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.477695] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1e5cc-b1fc-40fb-bf2e-cb443d63d974 
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.485741] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-635ef0f6-c553-441a-bad2-8f3f0e8b25e5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.491737] env[67424]: DEBUG oslo_vmware.api [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Task: {'id': task-3200050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077851} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.491969] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1634.492213] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1634.492407] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1634.492582] env[67424]: INFO nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Took 0.60 seconds to destroy the instance on the hypervisor. 
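The file-delete records above follow oslo.vmware's standard invoke-then-poll pattern: FileManager.DeleteDatastoreFile_Task returns a task moref, and wait_for_task() polls it (the "progress is 0%" then "completed successfully" records) until vCenter reports a result. A minimal sketch of that pattern, loosely mirroring what nova's ds_util.file_delete does; the session credentials, host name, and dc_ref here are illustrative assumptions, not values from this deployment:

    from oslo_vmware import api

    # Hypothetical connection; task_poll_interval drives the "_poll_task"
    # cadence visible in the records above.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    dc_ref = ...  # a Datacenter moref, obtained elsewhere (assumed)

    # Delete a datastore path like the one in the log; the call returns a
    # task moref immediately.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] aa454838-2a3f-40a0-825d-1b3f2656a6a5',
        datacenter=dc_ref)
    session.wait_for_task(task)  # blocks, polling progress until done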
[ 1634.494721] env[67424]: DEBUG nova.compute.claims [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1634.494889] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.495130] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.579142] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1634.631137] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1634.692955] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1634.693228] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1634.784426] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73b18e9-7c98-412b-a199-28c332de8cfb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.791837] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ec8e92-cb4f-4c14-98ac-a49438633760 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.822419] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4081d529-ef28-4bc3-aee5-1dfda6f9f499 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.829304] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ea3ed8-2599-4e1c-ae96-e24ce3fc1b38 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.841999] env[67424]: DEBUG nova.compute.provider_tree [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.850564] env[67424]: DEBUG nova.scheduler.client.report [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1634.864671] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.369s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.865240] env[67424]: ERROR nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1634.865240] env[67424]: Faults: ['InvalidArgument'] [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Traceback (most recent call last): [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, 
in _build_and_run_instance [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self.driver.spawn(context, instance, image_meta, [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self._fetch_image_if_missing(context, vi) [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] image_cache(vi, tmp_image_ds_loc) [ 1634.865240] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] vm_util.copy_virtual_disk( [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] session._wait_for_task(vmdk_copy_task) [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return self.wait_for_task(task_ref) [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return evt.wait() [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] result = hub.switch() [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] return self.greenlet.switch() [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1634.865614] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] self.f(*self.args, **self.kw) [ 1634.865967] env[67424]: ERROR nova.compute.manager [instance: 
aa454838-2a3f-40a0-825d-1b3f2656a6a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1634.865967] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] raise exceptions.translate_fault(task_info.error) [ 1634.865967] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1634.865967] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Faults: ['InvalidArgument'] [ 1634.865967] env[67424]: ERROR nova.compute.manager [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] [ 1634.865967] env[67424]: DEBUG nova.compute.utils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1634.867290] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Build of instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 was re-scheduled: A specified parameter was not correct: fileType [ 1634.867290] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1634.867685] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1634.867856] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1634.868037] env[67424]: DEBUG nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1634.868203] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1635.478583] env[67424]: DEBUG nova.network.neutron [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.491795] env[67424]: INFO nova.compute.manager [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Took 0.62 seconds to deallocate network for instance. [ 1635.598474] env[67424]: INFO nova.scheduler.client.report [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Deleted allocations for instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 [ 1635.625191] env[67424]: DEBUG oslo_concurrency.lockutils [None req-254c7eb0-e46a-41b2-84e9-f522f1e9ef53 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 674.534s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.628538] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 478.407s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.629095] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Acquiring lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.629095] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock 
"aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.629883] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.631697] env[67424]: INFO nova.compute.manager [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Terminating instance [ 1635.636373] env[67424]: DEBUG nova.compute.manager [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1635.636373] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1635.636373] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68af753f-8672-4e1c-840a-751a93908aaf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.639252] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1635.649156] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84425b77-4ecf-4fdf-b386-f66350ea2164 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.682674] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa454838-2a3f-40a0-825d-1b3f2656a6a5 could not be found. 
[ 1635.682966] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1635.683254] env[67424]: INFO nova.compute.manager [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1635.683588] env[67424]: DEBUG oslo.service.loopingcall [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.686234] env[67424]: DEBUG nova.compute.manager [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1635.686394] env[67424]: DEBUG nova.network.neutron [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1635.709155] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.709374] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.710840] env[67424]: INFO nova.compute.claims [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1635.722170] env[67424]: DEBUG nova.network.neutron [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.738979] env[67424]: INFO nova.compute.manager [-] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] Took 0.05 seconds to deallocate network for instance. 
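The long-held locks in the surrounding records (the build lock on the instance UUID held 674.534s, terminate waiting 478.407s on the same name) come from oslo.concurrency's named-lock decorator, which Nova uses to serialize per-instance operations. A minimal sketch of that pattern with a hypothetical handler; this is not the Nova source:

    from oslo_concurrency import lockutils

    # Nova builds its decorator via lockutils.synchronized_with_prefix('nova-');
    # the lock name shown in the "Acquiring lock ... acquired ... released"
    # records is the instance UUID (or UUID + "-events").
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('aa454838-2a3f-40a0-825d-1b3f2656a6a5')
    def do_terminate_instance():
        # shutdown / destroy / network deallocation would run here while
        # the per-instance lock is held
        pass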
[ 1635.831351] env[67424]: DEBUG oslo_concurrency.lockutils [None req-cdecaa3a-4e0f-4366-b774-f22a9c1a5574 tempest-AttachInterfacesUnderV243Test-1191532806 tempest-AttachInterfacesUnderV243Test-1191532806-project-member] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.832291] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 401.087s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.832531] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: aa454838-2a3f-40a0-825d-1b3f2656a6a5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1635.832743] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "aa454838-2a3f-40a0-825d-1b3f2656a6a5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.932193] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d136f586-a7dc-47a7-b428-12774641f893 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.940053] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e58fc2-61c2-48e3-bbfd-a57bb72f920e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.969697] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ae73c4-e4bb-4ff8-8667-8bdfc137bac5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.977371] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fa141b-3999-46fc-9be7-7ce84a7b0d59 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.990299] env[67424]: DEBUG nova.compute.provider_tree [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.999299] env[67424]: DEBUG nova.scheduler.client.report [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1636.013190] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.013704] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1636.047485] env[67424]: DEBUG nova.compute.utils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.048893] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1636.049116] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1636.057233] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1636.103028] env[67424]: DEBUG nova.policy [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66293d02fafb4d29a9f66c200ddb4624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fcf3dd62a69439a8bba32c3be865075', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1636.118817] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1636.143833] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1636.144095] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1636.144260] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1636.144442] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1636.144590] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1636.144735] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1636.144942] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1636.145138] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1636.145327] env[67424]: DEBUG 
nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1636.145493] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1636.145760] env[67424]: DEBUG nova.virt.hardware [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1636.146586] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c514d36c-a702-492d-b0c7-63626a93a850 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.154431] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1675319d-4253-48ea-b70b-3c15231e58c8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.388059] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.388234] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.388281] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1636.500393] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Successfully created port: 813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.329340] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Successfully updated port: 813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1637.339940] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.340095] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.340958] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1637.380701] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1637.545341] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Updating instance_info_cache with network_info: [{"id": "813dc37b-20ac-49cf-91aa-0fcb63668214", "address": "fa:16:3e:97:0c:78", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap813dc37b-20", "ovs_interfaceid": "813dc37b-20ac-49cf-91aa-0fcb63668214", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.556277] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.556577] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance network_info: |[{"id": "813dc37b-20ac-49cf-91aa-0fcb63668214", "address": "fa:16:3e:97:0c:78", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap813dc37b-20", "ovs_interfaceid": "813dc37b-20ac-49cf-91aa-0fcb63668214", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1637.557039] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:0c:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '813dc37b-20ac-49cf-91aa-0fcb63668214', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1637.564892] env[67424]: DEBUG oslo.service.loopingcall [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1637.565475] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1637.565712] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ecc1992-8965-4fb4-bcf6-b30c80a5fa03 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.588116] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1637.588116] env[67424]: value = "task-3200051" [ 1637.588116] env[67424]: _type = "Task" [ 1637.588116] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.597239] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200051, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.755620] env[67424]: DEBUG nova.compute.manager [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Received event network-vif-plugged-813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1637.755900] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Acquiring lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.756351] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.756576] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.756795] env[67424]: DEBUG nova.compute.manager [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] No waiting events found dispatching network-vif-plugged-813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1637.757017] env[67424]: WARNING nova.compute.manager [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Received unexpected event network-vif-plugged-813dc37b-20ac-49cf-91aa-0fcb63668214 for instance with vm_state building and task_state spawning. [ 1637.757270] env[67424]: DEBUG nova.compute.manager [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Received event network-changed-813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1637.757560] env[67424]: DEBUG nova.compute.manager [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Refreshing instance network info cache due to event network-changed-813dc37b-20ac-49cf-91aa-0fcb63668214. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1637.757682] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Acquiring lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.757859] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Acquired lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.758051] env[67424]: DEBUG nova.network.neutron [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Refreshing network info cache for port 813dc37b-20ac-49cf-91aa-0fcb63668214 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1638.097942] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200051, 'name': CreateVM_Task, 'duration_secs': 0.29688} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.098186] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1638.098855] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.099027] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.099367] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.099632] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9cf38b2-eea4-47d6-abcd-d41e0a824afc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.104077] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 1638.104077] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cfd92c-498c-6bd7-0614-2e5463f65dec" [ 1638.104077] env[67424]: _type = "Task" [ 1638.104077] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.111555] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52cfd92c-498c-6bd7-0614-2e5463f65dec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.436903] env[67424]: DEBUG nova.network.neutron [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Updated VIF entry in instance network info cache for port 813dc37b-20ac-49cf-91aa-0fcb63668214. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1638.437279] env[67424]: DEBUG nova.network.neutron [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Updating instance_info_cache with network_info: [{"id": "813dc37b-20ac-49cf-91aa-0fcb63668214", "address": "fa:16:3e:97:0c:78", "network": {"id": "ec588e71-fce2-4e83-bdf5-929cc1ba9805", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1718376150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcf3dd62a69439a8bba32c3be865075", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap813dc37b-20", "ovs_interfaceid": "813dc37b-20ac-49cf-91aa-0fcb63668214", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.447660] env[67424]: DEBUG oslo_concurrency.lockutils [req-630ee8ad-afd2-4606-af16-01f16b0b626c req-e26b4817-154a-4654-a261-aa4e76eddd8b service nova] Releasing lock "refresh_cache-7e457262-ef1d-469e-8c36-b0f341a00e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.621208] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.621635] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.621962] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.388314] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1639.388657] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.383775] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.387419] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.399278] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.399493] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.399659] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.399813] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1640.401421] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2cb582-70ca-4ccb-8fde-8b1ce8570488 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.410398] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e3df7c-dbd8-48e9-a55a-989230e5169b {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.424772] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766fa898-aabe-4a2c-8781-570c923806ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.431099] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253b5509-65c5-4b6c-bc1b-29e60ee91a68 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.460065] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1640.460065] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.460065] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.534980] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535238] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535383] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535509] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535631] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535748] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535863] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.535978] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.536102] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.536217] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.546902] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 2cdeaa5b-d8ce-4083-9a3f-cd514d943143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1640.557637] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1640.567433] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 251ac24f-18d2-42e6-ba08-87ca676f9261 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1640.577971] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1640.588830] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc0e72cb-20af-4116-86a4-94b464272cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1640.589073] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1640.589224] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1640.762963] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1301b277-6942-4a54-8a42-12f0e364030a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.771865] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7098a8ee-12f1-4dde-b58f-529e2d6b9194 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.801348] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dc9dc7-910b-4abf-b679-560d4e5e5839 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.808472] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568b6d7c-71d0-441d-aa19-76e499c82744 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.821071] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: 
b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.830244] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1640.843432] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1640.843604] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.384s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.844064] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1642.388275] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.388583] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.388982] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1645.388982] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1645.408445] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.408621] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.408731] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.408840] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.408959] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409097] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409221] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409373] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409508] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409629] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1645.409748] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1652.904824] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.328358] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.938973] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.939415] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.859603] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.859927] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.278849] env[67424]: WARNING oslo_vmware.rw_handles [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [
1682.278849] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1682.278849] env[67424]: ERROR oslo_vmware.rw_handles [ 1682.279344] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1682.281839] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1682.282169] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Copying Virtual Disk [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/6a24c47b-c551-47d0-a73f-b1903b41629f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1682.282492] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6047eca-913e-408d-a6d9-6b5d4373787c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.290233] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for the task: (returnval){ [ 1682.290233] env[67424]: value = "task-3200052" [ 1682.290233] env[67424]: _type = "Task" [ 1682.290233] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.299433] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Task: {'id': task-3200052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.801261] env[67424]: DEBUG oslo_vmware.exceptions [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1682.801563] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1682.802427] env[67424]: ERROR nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1682.802427] env[67424]: Faults: ['InvalidArgument'] [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Traceback (most recent call last): [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] yield resources [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.driver.spawn(context, instance, image_meta, [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._fetch_image_if_missing(context, vi) [ 1682.802427] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] image_cache(vi, tmp_image_ds_loc) [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] vm_util.copy_virtual_disk( [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] session._wait_for_task(vmdk_copy_task) [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.wait_for_task(task_ref) [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return evt.wait() [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] result = hub.switch() [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1682.802799] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.greenlet.switch() [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.f(*self.args, **self.kw) [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] raise exceptions.translate_fault(task_info.error) [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Faults: ['InvalidArgument'] [ 1682.803135] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] [ 1682.803135] env[67424]: INFO nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Terminating instance [ 1682.804423] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.804589] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 
tempest-MultipleCreateTestJSON-1179382693-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1682.804829] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ade411c5-22d8-4e70-b751-39fee6ba7332 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.807212] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.807460] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquired lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.807557] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1682.814617] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1682.814795] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1682.816138] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-438eeb31-2f9d-44bf-a196-988ea59c7a1c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.823482] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 1682.823482] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521e682f-1eac-911b-20f8-3a841145777c" [ 1682.823482] env[67424]: _type = "Task" [ 1682.823482] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.830875] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521e682f-1eac-911b-20f8-3a841145777c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.849492] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1683.022994] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.032462] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Releasing lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.032868] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1683.033076] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1683.034200] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c4da15-8a94-425c-be60-97eb3820862f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.043271] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1683.043571] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-252739f3-c679-41f5-ac2c-61621ab34d3e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.080920] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1683.081173] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Deleting contents of the VM from datastore datastore2 {{(pid=67424) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1683.081386] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Deleting the datastore file [datastore2] cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.081636] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-003e1e7a-1745-4fdc-96e6-9f4b05d44617 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.087549] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for the task: (returnval){ [ 1683.087549] env[67424]: value = "task-3200054" [ 1683.087549] env[67424]: _type = "Task" [ 1683.087549] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.096470] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Task: {'id': task-3200054, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.336506] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1683.336506] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating directory with path [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1683.336506] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80b80125-a50c-4c86-adb9-e02b8ed86d94 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.345624] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created directory with path [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1683.345624] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Fetch image to [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1683.345624] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None 
req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1683.346451] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b35936-0d9f-4127-9841-6e1470be4b72 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.353598] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ddf710-c192-474d-880d-01bd5c9aae01 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.362303] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc13d38a-8c79-4e0c-a350-f03fb8073d22 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.393425] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff1d7e5-67db-43b7-88bb-ad871ffc445e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.402063] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6a1b444-39eb-4a54-8f5c-24e584daaf8b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.423868] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1683.488140] env[67424]: DEBUG oslo_vmware.rw_handles [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1683.549150] env[67424]: DEBUG oslo_vmware.rw_handles [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Completed reading data from the image iterator. 
{{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1683.549356] env[67424]: DEBUG oslo_vmware.rw_handles [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1683.597201] env[67424]: DEBUG oslo_vmware.api [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Task: {'id': task-3200054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0312} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.597486] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.597671] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1683.597841] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1683.598048] env[67424]: INFO nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1683.598309] env[67424]: DEBUG oslo.service.loopingcall [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.598520] env[67424]: DEBUG nova.compute.manager [-] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1683.601304] env[67424]: DEBUG nova.compute.claims [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1683.601637] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.601919] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.846781] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8d4dcc-8be5-4b36-b193-943a52f490ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.855735] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29e1a45-527f-4b26-b39c-f32bc988dbbd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.885042] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1497f33-24d2-4be3-851e-33cb42ad8d71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.894326] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c62636-5c00-4afd-b538-e6f9ecba3c73 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.908603] env[67424]: DEBUG nova.compute.provider_tree [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.919711] env[67424]: DEBUG nova.scheduler.client.report [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1683.939447] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.939987] env[67424]: ERROR nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1683.939987] env[67424]: Faults: ['InvalidArgument'] [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Traceback (most recent call last): [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.driver.spawn(context, instance, image_meta, [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._fetch_image_if_missing(context, vi) [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] image_cache(vi, tmp_image_ds_loc) [ 1683.939987] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] vm_util.copy_virtual_disk( [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] session._wait_for_task(vmdk_copy_task) [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.wait_for_task(task_ref) [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] 
return evt.wait() [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] result = hub.switch() [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.greenlet.switch() [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1683.940299] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.f(*self.args, **self.kw) [ 1683.940598] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1683.940598] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] raise exceptions.translate_fault(task_info.error) [ 1683.940598] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1683.940598] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Faults: ['InvalidArgument'] [ 1683.940598] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] [ 1683.940872] env[67424]: DEBUG nova.compute.utils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1683.944665] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Build of instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 was re-scheduled: A specified parameter was not correct: fileType [ 1683.944665] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1683.944665] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1683.944665] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.944665] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e 
tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquired lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.944864] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1683.989437] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1684.153976] env[67424]: DEBUG nova.network.neutron [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.164679] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Releasing lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.164926] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1684.165132] env[67424]: DEBUG nova.compute.manager [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1684.255549] env[67424]: INFO nova.scheduler.client.report [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Deleted allocations for instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 [ 1684.284548] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b1fed55-19e4-4c3f-a1cd-471ec01aed2e tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 623.844s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.284548] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 449.537s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.284548] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] During sync_power_state the instance has a pending task (spawning). Skip. [ 1684.284548] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.284760] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 427.613s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.284760] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.284760] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.285116] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.287475] env[67424]: INFO nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Terminating instance [ 1684.296020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquiring lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.296020] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Acquired lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.296020] env[67424]: DEBUG nova.network.neutron [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1684.296020] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 2cdeaa5b-d8ce-4083-9a3f-cd514d943143] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1684.331275] env[67424]: DEBUG nova.network.neutron [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1684.334614] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 2cdeaa5b-d8ce-4083-9a3f-cd514d943143] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1684.358542] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "2cdeaa5b-d8ce-4083-9a3f-cd514d943143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.703s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.372045] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1684.426536] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.426795] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.428246] env[67424]: INFO nova.compute.claims [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.445350] env[67424]: DEBUG nova.network.neutron [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.453451] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Releasing lock "refresh_cache-cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.453963] env[67424]: DEBUG nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1684.454233] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1684.454742] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa56a40a-f88b-4c56-bb20-27dfeb31449e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.466337] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2babf93c-a6f7-4d6f-82b5-bdb3732c03ac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.497847] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501 could not be found. [ 1684.498069] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1684.498258] env[67424]: INFO nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1684.498498] env[67424]: DEBUG oslo.service.loopingcall [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.498717] env[67424]: DEBUG nova.compute.manager [-] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1684.498857] env[67424]: DEBUG nova.network.neutron [-] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1684.644665] env[67424]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67424) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1684.644665] env[67424]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1684.644665] env[67424]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-bc25e1a2-45b0-463c-8a44-84054db8f202'] [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1684.645247] env[67424]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1684.645775] env[67424]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1684.646349] env[67424]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1684.646349] env[67424]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1684.646349] env[67424]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.646349] env[67424]: ERROR oslo.service.loopingcall [ 1684.646349] env[67424]: ERROR nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.677378] env[67424]: ERROR nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Traceback (most recent call last): [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] exception_handler_v20(status_code, error_body) [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] raise client_exc(message=error_message, [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Neutron server returns request_ids: ['req-bc25e1a2-45b0-463c-8a44-84054db8f202'] [ 1684.677378] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] During handling of the above exception, another exception occurred: [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Traceback (most recent call last): [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: 
cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._delete_instance(context, instance, bdms) [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._shutdown_instance(context, instance, bdms) [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._try_deallocate_network(context, instance, requested_networks) [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] with excutils.save_and_reraise_exception(): [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1684.677759] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.force_reraise() [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] raise self.value [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] _deallocate_network_with_retries() [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return evt.wait() [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] result = hub.switch() [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.greenlet.switch() [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1684.678154] env[67424]: ERROR nova.compute.manager [instance: 
cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] result = func(*self.args, **self.kw) [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] result = f(*args, **kwargs) [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._deallocate_network( [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self.network_api.deallocate_for_instance( [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] data = neutron.list_ports(**search_opts) [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.list('ports', self.ports_path, retrieve_all, [ 1684.678467] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] for r in self._pagination(collection, path, **params): [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] res = self.get(path, params=params) [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.retry_request("GET", action, body=body, [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1684.678841] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] return self.do_request(method, action, body=body, [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] ret = obj(*args, **kwargs) [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] self._handle_fault_response(status_code, replybody, resp) [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1684.679226] env[67424]: ERROR nova.compute.manager [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] [ 1684.684822] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb918ee-cd00-4576-b45b-266d48428b82 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.693351] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34c5c38-fdb4-4412-927f-a38bd9b342f1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.725780] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Lock "cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.442s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.727399] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d051a8a-28aa-4b3e-a1f3-4579c21f30d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.736683] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57076f5e-a58b-4e0e-a4dd-737cb677d4d9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.750759] env[67424]: DEBUG nova.compute.provider_tree [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1684.760439] env[67424]: DEBUG nova.scheduler.client.report [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1684.774028] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.774028] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1684.779827] env[67424]: INFO nova.compute.manager [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] [instance: cb5b6a03-e9f3-4c0c-9b9e-5b2c5332d501] Successfully reverted task state from None on failure for instance. [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server [None req-8b4fdbb8-eb0a-41dc-99aa-cd0b9c470601 tempest-ServersAaction247Test-1153257357 tempest-ServersAaction247Test-1153257357-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-bc25e1a2-45b0-463c-8a44-84054db8f202'] [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1684.782927] env[67424]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
1684.783462] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.783462] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1684.783951] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server raise self.value [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1684.784457] env[67424]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 
1684.784908] env[67424]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.784908] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1684.785421] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1684.785886] env[67424]: ERROR oslo_messaging.rpc.server [ 1684.806942] env[67424]: DEBUG nova.compute.utils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.808141] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1684.808322] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1684.821937] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1684.864915] env[67424]: DEBUG nova.policy [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c51b8467e9a4dd7b7259edacd7f0fca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '32577de731d749ffb2939075f98687dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1684.884017] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1684.911321] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1684.911547] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1684.911706] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1684.911883] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1684.912039] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1684.912197] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1684.912408] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1684.912568] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1684.912767] env[67424]: DEBUG 
nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1684.912898] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1684.913090] env[67424]: DEBUG nova.virt.hardware [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1684.913965] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0406ae-eb55-477c-b65e-1d258db33c7e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.922776] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e955a20-0a68-4b88-ae17-45f773508a83 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.418518] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Successfully created port: 5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1686.113859] env[67424]: DEBUG nova.compute.manager [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Received event network-vif-plugged-5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1686.115340] env[67424]: DEBUG oslo_concurrency.lockutils [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] Acquiring lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.116621] env[67424]: DEBUG oslo_concurrency.lockutils [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.116621] env[67424]: DEBUG oslo_concurrency.lockutils [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.116621] env[67424]: DEBUG
nova.compute.manager [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] No waiting events found dispatching network-vif-plugged-5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1686.116621] env[67424]: WARNING nova.compute.manager [req-31afa49f-b313-42ba-a808-f4e26ee35e4c req-dcbbbf47-9396-4e8b-85a6-551521a4ce74 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Received unexpected event network-vif-plugged-5c910418-515a-499f-91e1-02b95ee9f010 for instance with vm_state building and task_state spawning. [ 1686.500167] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Successfully updated port: 5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.516232] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.516232] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.516232] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1686.591875] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1686.847913] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Updating instance_info_cache with network_info: [{"id": "5c910418-515a-499f-91e1-02b95ee9f010", "address": "fa:16:3e:46:ca:96", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c910418-51", "ovs_interfaceid": "5c910418-515a-499f-91e1-02b95ee9f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.859353] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.859644] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance network_info: |[{"id": "5c910418-515a-499f-91e1-02b95ee9f010", "address": "fa:16:3e:46:ca:96", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c910418-51", "ovs_interfaceid": "5c910418-515a-499f-91e1-02b95ee9f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1686.860058] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:ca:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c910418-515a-499f-91e1-02b95ee9f010', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1686.867690] env[67424]: DEBUG oslo.service.loopingcall [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.868179] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1686.868417] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96e01fd5-c610-47fc-873c-671e875059b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.889412] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1686.889412] env[67424]: value = "task-3200055" [ 1686.889412] env[67424]: _type = "Task" [ 1686.889412] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.898305] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200055, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.400456] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200055, 'name': CreateVM_Task, 'duration_secs': 0.286629} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.400635] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1687.401351] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.401535] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.401843] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1687.402114] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dd7d81d-5df0-482b-af2e-7a25750e68fb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.407042] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1687.407042] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522ac9d0-cdf7-ec07-998c-300f3bca0dea" [ 1687.407042] env[67424]: _type = "Task" [ 1687.407042] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.414813] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522ac9d0-cdf7-ec07-998c-300f3bca0dea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.917688] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.918057] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1687.918314] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.312035] env[67424]: DEBUG nova.compute.manager [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Received event network-changed-5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1688.312221] env[67424]: DEBUG nova.compute.manager [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Refreshing instance network info cache due to event network-changed-5c910418-515a-499f-91e1-02b95ee9f010. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1688.312441] env[67424]: DEBUG oslo_concurrency.lockutils [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] Acquiring lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.312640] env[67424]: DEBUG oslo_concurrency.lockutils [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] Acquired lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.312899] env[67424]: DEBUG nova.network.neutron [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Refreshing network info cache for port 5c910418-515a-499f-91e1-02b95ee9f010 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1688.601069] env[67424]: DEBUG nova.network.neutron [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Updated VIF entry in instance network info cache for port 5c910418-515a-499f-91e1-02b95ee9f010. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1688.601460] env[67424]: DEBUG nova.network.neutron [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Updating instance_info_cache with network_info: [{"id": "5c910418-515a-499f-91e1-02b95ee9f010", "address": "fa:16:3e:46:ca:96", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c910418-51", "ovs_interfaceid": "5c910418-515a-499f-91e1-02b95ee9f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.611660] env[67424]: DEBUG oslo_concurrency.lockutils [req-2f93fbd5-c22e-46f7-87d4-1654bfafcfad req-530f4d37-4065-45bd-bf50-28af2cdee856 service nova] Releasing lock "refresh_cache-a909e0f2-5717-469f-83f2-4b07f03e2ff6" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.387093] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.921824] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.391655] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.391655] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1699.391576] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.383603] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.387302] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.454789] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.454789] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.388020] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1702.387599] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1702.398739] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.399045] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.399136] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.399285] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1702.400771] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ff27ab-c155-4344-9279-3f10489c45ac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.409304] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6630aea-70d2-46bb-b536-ea357d9d58b7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.423134] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6620aa3-0ecb-44b9-949e-072da0248403 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.429933] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d7aeac-1c3a-44c6-91be-fd10e5cb31d6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.459136] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180974MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1702.459300] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.459491] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.532528] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 15544bb1-1353-4b19-ac1e-967f2e43713e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.532688] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.532815] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.532935] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533069] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533187] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533303] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533417] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533534] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.533639] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1702.544735] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1702.555146] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance dc0e72cb-20af-4116-86a4-94b464272cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1702.565600] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1702.575826] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1702.584672] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1702.584801] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1702.584941] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1702.600806] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1702.616015] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1702.616412] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1702.627425] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1702.644455] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1702.802561] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1677d13-b5bb-4da3-b67d-ff83d0d26d32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.811445] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-284a5ee0-544d-4f69-a842-17cbcf5ec3d5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.840589] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421742cc-6692-4b99-b1e3-ad43efcc1c0a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.847342] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ad2e31-cdcb-4d04-9b92-8ca2beed2188 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.860410] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.870176] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1702.887017] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1702.887256] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.428s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.887348] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1706.387711] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1706.388085] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1706.388085] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1706.407825] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 
15544bb1-1353-4b19-ac1e-967f2e43713e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.407994] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408273] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408430] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408563] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408687] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408810] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.408929] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.409060] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.409180] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1706.409299] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1711.405599] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1732.751064] env[67424]: WARNING oslo_vmware.rw_handles [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1732.751064] env[67424]: ERROR oslo_vmware.rw_handles [ 1732.751815] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1732.753337] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1732.753570] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Copying Virtual Disk [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/0d5047e6-198b-415a-bdc6-4ecf02cb5f30/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1732.753853] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18346cc7-334d-463c-b4f4-6f277f72db07 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.761525] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 1732.761525] env[67424]: value = "task-3200056" [ 1732.761525] env[67424]: _type = "Task" [ 1732.761525] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.769275] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.272438] env[67424]: DEBUG oslo_vmware.exceptions [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1733.272785] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.273405] env[67424]: ERROR nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1733.273405] env[67424]: Faults: ['InvalidArgument'] [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Traceback (most recent call last): [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] yield resources [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self.driver.spawn(context, instance, image_meta, [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] 
self._fetch_image_if_missing(context, vi) [ 1733.273405] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] image_cache(vi, tmp_image_ds_loc) [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] vm_util.copy_virtual_disk( [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] session._wait_for_task(vmdk_copy_task) [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return self.wait_for_task(task_ref) [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return evt.wait() [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] result = hub.switch() [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1733.273752] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return self.greenlet.switch() [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self.f(*self.args, **self.kw) [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] raise exceptions.translate_fault(task_info.error) [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Faults: ['InvalidArgument'] [ 1733.274115] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] [ 1733.274115] env[67424]: INFO nova.compute.manager [None 
req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Terminating instance [ 1733.275478] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.275644] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1733.276268] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1733.276461] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1733.276684] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d9ad99b-39c9-4742-9e64-24303eebffdf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.278947] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d749031-0afa-46af-b4b5-bbbfe0c950ff {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.285576] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1733.285789] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94d794d4-f776-4fdb-a25c-7800b53db79a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.287889] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1733.288072] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1733.288973] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86b8992c-9852-4b53-b999-c17009a57996 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.293230] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1733.293230] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52f62df9-e73a-420b-d060-ba54a71d72db" [ 1733.293230] env[67424]: _type = "Task" [ 1733.293230] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.300182] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52f62df9-e73a-420b-d060-ba54a71d72db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.350742] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1733.350962] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1733.351166] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleting the datastore file [datastore2] 15544bb1-1353-4b19-ac1e-967f2e43713e {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1733.351438] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2111c594-1d8f-409f-b162-f98a4a7d6656 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.356924] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 1733.356924] env[67424]: value = "task-3200058" [ 1733.356924] env[67424]: _type = "Task" [ 1733.356924] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.364132] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200058, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.803814] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1733.804202] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1733.804336] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-244f3b75-59f3-45ca-b2d7-d249f7b0f4f3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.815392] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1733.815578] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Fetch image to [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1733.815748] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1733.816470] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e11eaf-ee0c-4455-8756-13305e54e7cc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.822786] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df75359-6da3-4b29-9d4d-6b88037989e8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.831344] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa84fac1-abe1-4c1a-bf36-1225263b6adc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.864540] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c87528bb-de93-4669-b8bf-862a76fa3ddd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.871017] env[67424]: DEBUG oslo_vmware.api [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090259} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.872325] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1733.872516] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1733.872685] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1733.872860] env[67424]: INFO nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Took 0.60 seconds to destroy the instance on the hypervisor. 
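The failure sequence above follows oslo.vmware's task-polling contract: wait_for_task spins a looping call around _poll_task, and when the CopyVirtualDisk_Task lands in an error state the task fault is translated into a Python exception (here no specific class matched 'InvalidArgument', so the generic VimFaultException surfaced in Nova's spawn path). A minimal sketch of that poll-and-translate pattern, assuming a hypothetical get_task_info callable in place of the real PropertyCollector round-trip; this illustrates the pattern, not oslo.vmware's actual internals:

    import time


    class VimFaultException(Exception):
        """Generic fault used when no specific fault class matches."""

        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list


    # Registry of specific fault classes; 'InvalidArgument' is deliberately
    # absent here, mirroring the "Fault InvalidArgument not matched" line.
    _FAULT_CLASSES = {}


    def translate_fault(error):
        # Prefer a registered fault class; otherwise fall back to the
        # generic exception, as the spawn failure above does.
        for name in error.get("faults", []):
            cls = _FAULT_CLASSES.get(name)
            if cls is not None:
                return cls(error["localizedMessage"])
        return VimFaultException(list(error.get("faults", [])),
                                 error["localizedMessage"])


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state."""
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # The hop where a failed CopyVirtualDisk_Task becomes a
                # Python exception inside the caller.
                raise translate_fault(info["error"])
            time.sleep(poll_interval)


    # Example: a task that fails the way task-3200056 does above.
    states = iter([
        {"state": "running"},
        {"state": "error", "error": {
            "localizedMessage": "A specified parameter was not correct: fileType",
            "faults": ["InvalidArgument"]}},
    ])
    try:
        wait_for_task(lambda: next(states), poll_interval=0.01)
    except VimFaultException as exc:
        print(exc.fault_list, "->", exc)
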
[ 1733.874832] env[67424]: DEBUG nova.compute.claims [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1733.875021] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.875237] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.877693] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fd75bb6c-2643-4d8d-8476-3f522e5c75a8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.899031] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1733.955872] env[67424]: DEBUG oslo_vmware.rw_handles [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1734.016665] env[67424]: DEBUG oslo_vmware.rw_handles [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1734.016887] env[67424]: DEBUG oslo_vmware.rw_handles [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1734.149018] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40c87f7-41d1-435b-813e-0477d88da18c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.155510] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f42e240-20bd-4b87-adc7-74d9532c37de {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.185272] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee3586f-f876-439e-b9c7-8b7a0773beab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.192271] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d09dd27-f5be-4aac-a590-ee7979f0f9aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.204849] env[67424]: DEBUG nova.compute.provider_tree [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1734.213046] env[67424]: DEBUG nova.scheduler.client.report [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1734.226705] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.227257] env[67424]: ERROR nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1734.227257] env[67424]: Faults: ['InvalidArgument'] [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Traceback (most recent call last): [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1734.227257] env[67424]: 
ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self.driver.spawn(context, instance, image_meta, [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self._fetch_image_if_missing(context, vi) [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] image_cache(vi, tmp_image_ds_loc) [ 1734.227257] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] vm_util.copy_virtual_disk( [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] session._wait_for_task(vmdk_copy_task) [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return self.wait_for_task(task_ref) [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return evt.wait() [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] result = hub.switch() [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] return self.greenlet.switch() [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1734.227633] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] self.f(*self.args, **self.kw) [ 1734.228014] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1734.228014] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] raise exceptions.translate_fault(task_info.error) [ 1734.228014] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1734.228014] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Faults: ['InvalidArgument'] [ 1734.228014] env[67424]: ERROR nova.compute.manager [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] [ 1734.228014] env[67424]: DEBUG nova.compute.utils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1734.229319] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Build of instance 15544bb1-1353-4b19-ac1e-967f2e43713e was re-scheduled: A specified parameter was not correct: fileType [ 1734.229319] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1734.229670] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1734.229842] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1734.230018] env[67424]: DEBUG nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1734.230184] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1734.505183] env[67424]: DEBUG nova.network.neutron [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.515845] env[67424]: INFO nova.compute.manager [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Took 0.29 seconds to deallocate network for instance. [ 1734.607557] env[67424]: INFO nova.scheduler.client.report [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleted allocations for instance 15544bb1-1353-4b19-ac1e-967f2e43713e [ 1734.631725] env[67424]: DEBUG oslo_concurrency.lockutils [None req-610f7b15-f726-4ff3-ac0a-a6f307b95b16 tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 666.278s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.632630] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 469.881s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.633237] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.633513] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.633728] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.635641] env[67424]: INFO nova.compute.manager [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Terminating instance [ 1734.637337] env[67424]: DEBUG nova.compute.manager [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1734.637551] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1734.638223] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46cbc1bb-eab3-4a58-a4ba-c85d966bb210 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.642442] env[67424]: DEBUG nova.compute.manager [None req-d99cd2ac-50fd-4dbf-8075-6087ffb051f4 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 251ac24f-18d2-42e6-ba08-87ca676f9261] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1734.649292] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c3bde0-d2a8-4680-a081-5d84b010798d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.679930] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 15544bb1-1353-4b19-ac1e-967f2e43713e could not be found. [ 1734.680158] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1734.680338] env[67424]: INFO nova.compute.manager [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Took 0.04 seconds to destroy the instance on the hypervisor.
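The Acquiring/acquired/released bookkeeping with waited/held timings that threads through this section comes from oslo.concurrency's lockutils; Nova serializes per-instance work on a lock named after the instance UUID, which is why do_terminate_instance above waited 469.881s for the build path to release "15544bb1-1353-4b19-ac1e-967f2e43713e". A rough sketch of the two lock shapes involved; the function bodies are illustrative stand-ins, not Nova's real methods:

    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Process-wide "compute_resources" lock: the same name the
        # resource tracker holds above while rolling back the claim.
        pass


    def locked_do_build_and_run_instance(instance_uuid):
        # Per-instance lock named after the UUID; a concurrent
        # terminate_instance blocks on this same name until released.
        with lockutils.lock(instance_uuid):
            time.sleep(0.1)  # stand-in for the actual build/spawn work


    abort_instance_claim()
    locked_do_build_and_run_instance("15544bb1-1353-4b19-ac1e-967f2e43713e")
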
[ 1734.680579] env[67424]: DEBUG oslo.service.loopingcall [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.680971] env[67424]: DEBUG nova.compute.manager [None req-d99cd2ac-50fd-4dbf-8075-6087ffb051f4 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 251ac24f-18d2-42e6-ba08-87ca676f9261] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1734.682237] env[67424]: DEBUG nova.compute.manager [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1734.682391] env[67424]: DEBUG nova.network.neutron [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1734.703678] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d99cd2ac-50fd-4dbf-8075-6087ffb051f4 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "251ac24f-18d2-42e6-ba08-87ca676f9261" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.672s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.706580] env[67424]: DEBUG nova.network.neutron [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.713901] env[67424]: INFO nova.compute.manager [-] [instance: 15544bb1-1353-4b19-ac1e-967f2e43713e] Took 0.03 seconds to deallocate network for instance. [ 1734.719164] env[67424]: DEBUG nova.compute.manager [None req-699f4f2a-d65b-41af-b511-d086df2379e2 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1734.744875] env[67424]: DEBUG nova.compute.manager [None req-699f4f2a-d65b-41af-b511-d086df2379e2 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] [instance: 1ce795f5-58c4-4f28-9ae5-07c5dad82c2d] Instance disappeared before build. 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1734.768445] env[67424]: DEBUG oslo_concurrency.lockutils [None req-699f4f2a-d65b-41af-b511-d086df2379e2 tempest-AttachVolumeNegativeTest-549120690 tempest-AttachVolumeNegativeTest-549120690-project-member] Lock "1ce795f5-58c4-4f28-9ae5-07c5dad82c2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 216.839s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.777632] env[67424]: DEBUG nova.compute.manager [None req-9818767a-2b57-4ddf-a88d-ae28030a3135 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: dc0e72cb-20af-4116-86a4-94b464272cff] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1734.810212] env[67424]: DEBUG nova.compute.manager [None req-9818767a-2b57-4ddf-a88d-ae28030a3135 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] [instance: dc0e72cb-20af-4116-86a4-94b464272cff] Instance disappeared before build. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1734.820805] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c59848e3-c525-461d-8750-4fe8f199e18b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "15544bb1-1353-4b19-ac1e-967f2e43713e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.188s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.832405] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9818767a-2b57-4ddf-a88d-ae28030a3135 tempest-ImagesTestJSON-1398473068 tempest-ImagesTestJSON-1398473068-project-member] Lock "dc0e72cb-20af-4116-86a4-94b464272cff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.539s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.840897] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1734.888201] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.888445] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.889888] env[67424]: INFO nova.compute.claims [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1735.086408] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f6727c-cd84-4110-9100-45b8624655aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.096050] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510277e1-14b1-4fdc-89d9-5f205ff90814 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.140411] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8045130-bf75-4e05-8860-eb90b923d65f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.147928] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc984182-3a49-41a2-a333-6dca42e29da3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.161429] env[67424]: DEBUG nova.compute.provider_tree [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.170037] env[67424]: DEBUG nova.scheduler.client.report [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1735.184987] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.185475] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1735.217104] env[67424]: DEBUG nova.compute.utils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1735.218578] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1735.218780] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1735.227054] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1735.295509] env[67424]: DEBUG nova.policy [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ac8098a2a904b4292a23bc38e8be219', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc66a2bf57d34e309f0f21a60c224076', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1735.298711] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1735.326297] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1735.326511] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1735.326712] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1735.326875] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1735.327028] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1735.327185] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1735.327388] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1735.327545] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1735.327705] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1735.327868] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1735.328048] env[67424]: DEBUG nova.virt.hardware [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1735.328903] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56588d94-b5ea-48e3-8de9-ce3c1a61e6d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.337124] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad2d4cc-7ae9-44a4-a803-5605a1b3f747 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.875961] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Successfully created port: dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1736.627954] env[67424]: DEBUG nova.compute.manager [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Received event network-vif-plugged-dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1736.628206] env[67424]: DEBUG oslo_concurrency.lockutils [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] Acquiring lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.628415] env[67424]: DEBUG oslo_concurrency.lockutils [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.628718] env[67424]: DEBUG oslo_concurrency.lockutils [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.628939] env[67424]: DEBUG nova.compute.manager [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] 
No waiting events found dispatching network-vif-plugged-dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1736.629219] env[67424]: WARNING nova.compute.manager [req-3f86a491-a7d8-457e-ae9f-db6271f6c5df req-d61fd1e1-a615-4a3b-a002-fd387bb8c3bb service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Received unexpected event network-vif-plugged-dc772531-7d0e-4625-bf11-056d54e4e8fa for instance with vm_state building and task_state spawning. [ 1736.729924] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Successfully updated port: dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1736.739798] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.739933] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.740098] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1736.819070] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1737.006463] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Updating instance_info_cache with network_info: [{"id": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "address": "fa:16:3e:6f:d1:b5", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc772531-7d", "ovs_interfaceid": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.018585] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.018885] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance network_info: |[{"id": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "address": "fa:16:3e:6f:d1:b5", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc772531-7d", "ovs_interfaceid": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1737.019330] env[67424]: 
DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:d1:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc772531-7d0e-4625-bf11-056d54e4e8fa', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1737.026836] env[67424]: DEBUG oslo.service.loopingcall [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.027276] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1737.027503] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a96fc13-cfbb-42d1-bcf7-c0d346e19397 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.047717] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1737.047717] env[67424]: value = "task-3200059" [ 1737.047717] env[67424]: _type = "Task" [ 1737.047717] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.054890] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200059, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.559028] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200059, 'name': CreateVM_Task, 'duration_secs': 0.295004} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.559028] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1737.559277] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.559452] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.559756] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1737.559996] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bea0a7-1813-4e18-b190-ce5a28ea7642 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.564437] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 1737.564437] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52263165-a909-95a7-35cd-2b5be7357ea0" [ 1737.564437] env[67424]: _type = "Task" [ 1737.564437] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.571871] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52263165-a909-95a7-35cd-2b5be7357ea0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.076108] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.076439] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1738.076579] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.780058] env[67424]: DEBUG nova.compute.manager [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Received event network-changed-dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1738.780280] env[67424]: DEBUG nova.compute.manager [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Refreshing instance network info cache due to event network-changed-dc772531-7d0e-4625-bf11-056d54e4e8fa. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1738.780499] env[67424]: DEBUG oslo_concurrency.lockutils [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] Acquiring lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.780639] env[67424]: DEBUG oslo_concurrency.lockutils [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] Acquired lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.780797] env[67424]: DEBUG nova.network.neutron [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Refreshing network info cache for port dc772531-7d0e-4625-bf11-056d54e4e8fa {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1739.149366] env[67424]: DEBUG nova.network.neutron [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Updated VIF entry in instance network info cache for port dc772531-7d0e-4625-bf11-056d54e4e8fa. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1739.149750] env[67424]: DEBUG nova.network.neutron [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Updating instance_info_cache with network_info: [{"id": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "address": "fa:16:3e:6f:d1:b5", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc772531-7d", "ovs_interfaceid": "dc772531-7d0e-4625-bf11-056d54e4e8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.158957] env[67424]: DEBUG oslo_concurrency.lockutils [req-f1950d83-800a-4d19-b121-4a89f348229b req-87e57a69-fc82-4fc5-a437-30d5ccca750b service nova] Releasing lock "refresh_cache-bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.388123] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.388063] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.388428] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.388545] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1762.383280] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1762.386842] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1762.387040] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1762.399698] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.399910] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.400092] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.400249] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1762.401364] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6001a94-72e6-4e83-89ef-1d10858be5b5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.410439] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faca3c51-586b-4230-9906-6ba969eec042 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.424098] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b6d13b-a1a7-40ba-a785-f501d2488604 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.429978] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f945319-1a16-4475-b568-72b09c8a839a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.458326] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180997MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1762.458480] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.458669] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.531887] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532054] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532185] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532306] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532425] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532540] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532653] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532768] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532882] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.532988] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1762.543034] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1762.552767] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1762.552977] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1762.553137] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1762.687695] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42af51cb-a9ca-4edf-9714-3d41715e5ae0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.696407] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dc16d6-130f-4b7f-a5f3-9c262775acf2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.725073] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bf084d-5e49-42c8-b746-872cb7fe167e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.731639] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb8fe27-a5fc-47c0-851f-93c2accf3dc7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.744012] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.752141] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1762.765816] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1762.765999] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.307s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.766958] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.388637] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1767.388055] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1767.388433] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1767.388433] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1767.408977] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409187] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409276] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409401] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409523] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409642] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409760] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409876] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.409992] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.410121] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1767.410239] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1782.768575] env[67424]: WARNING oslo_vmware.rw_handles [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1782.768575] env[67424]: ERROR oslo_vmware.rw_handles [ 1782.769516] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1782.771177] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 
4c3e649d-52e8-4c3d-9f0b-19077db44543] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1782.771442] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Copying Virtual Disk [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/9022b369-d08c-472f-8311-4c96ca260316/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1782.771736] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a81d190-07e7-4caa-884e-c194207eb0b3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.780175] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1782.780175] env[67424]: value = "task-3200060" [ 1782.780175] env[67424]: _type = "Task" [ 1782.780175] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.787960] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.291662] env[67424]: DEBUG oslo_vmware.exceptions [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1783.291939] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.292505] env[67424]: ERROR nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1783.292505] env[67424]: Faults: ['InvalidArgument'] [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Traceback (most recent call last): [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] yield resources [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self.driver.spawn(context, instance, image_meta, [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self._fetch_image_if_missing(context, vi) [ 1783.292505] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] image_cache(vi, tmp_image_ds_loc) [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] vm_util.copy_virtual_disk( [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] session._wait_for_task(vmdk_copy_task) [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return self.wait_for_task(task_ref) [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return evt.wait() [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] result = hub.switch() [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1783.292912] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return self.greenlet.switch() [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self.f(*self.args, **self.kw) [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] raise exceptions.translate_fault(task_info.error) [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Faults: ['InvalidArgument'] [ 1783.293302] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] [ 1783.293302] env[67424]: INFO nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Terminating instance [ 1783.294370] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.294573] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.294804] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3056cd4-ed46-4516-ba7d-70b40f4e9eff {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.296901] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1783.297110] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1783.297831] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d1032e-4e61-4ad2-8d82-7cd36caabd51 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.304384] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1783.304583] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f13253f0-58f6-40f9-a942-0609e6e0e228 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.306588] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.306756] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1783.307681] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29845334-73a0-4eca-b43a-4e6ae8b9df46 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.312063] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 1783.312063] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]526c4611-f810-6ba2-be9d-448945f8462c" [ 1783.312063] env[67424]: _type = "Task" [ 1783.312063] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.375394] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1783.375658] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1783.375849] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleting the datastore file [datastore2] 4c3e649d-52e8-4c3d-9f0b-19077db44543 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1783.376131] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5d5f85b-da4b-4039-9b56-16da34e1fc4a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.382384] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1783.382384] env[67424]: value = "task-3200062" [ 1783.382384] env[67424]: _type = "Task" [ 1783.382384] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.389778] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.823731] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1783.824023] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.824275] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87adbe22-f4e1-4c59-ba6d-d40cd66bdff3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.835524] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.835712] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Fetch image to [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1783.835881] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1783.836606] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5714ee-0f96-4844-89ab-24d366a4b844 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.842842] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ab4ad3-9533-4fd9-be4d-09d1cba54536 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.851430] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddaac0be-c77d-4862-afd7-7e5f732109b3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.881201] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b7a12a-d786-4666-a9f5-82fe867cbdfa {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.891118] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3a8219bf-2970-44c3-8886-40baee24ccaa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.892687] env[67424]: DEBUG oslo_vmware.api [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074681} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.892912] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1783.893097] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1783.893270] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1783.893439] env[67424]: INFO nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Took 0.60 seconds to destroy the instance on the hypervisor. 
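[Editor's note] The records above repeatedly show oslo.vmware's task-polling pattern: a vCenter task is started (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task), then wait_for_task blocks while _poll_task logs "progress is 0%." until the task reports success or a fault. Below is a minimal illustrative sketch of that loop; TaskInfo, FakeSession, and this wait_for_task are hypothetical stand-ins written for this note, not the oslo.vmware API.

    # Sketch only: a simplified poll loop in the spirit of the
    # wait_for_task/_poll_task cycle logged above. Not oslo.vmware code.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    class FakeSession:
        """Returns canned task states, mimicking successive progress polls."""
        def __init__(self, states):
            self._states = iter(states)

        def get_task_info(self, task_ref):
            return next(self._states)

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll until the task completes; raise if vCenter reports a fault."""
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The real library translates the fault here, producing e.g.
                # VimFaultException with Faults: ['InvalidArgument'].
                raise RuntimeError(info.error)
            time.sleep(interval)

    session = FakeSession([TaskInfo('running', 0),
                           TaskInfo('running', 45),
                           TaskInfo('success', 100)])
    print(wait_for_task(session, 'task-3200059', interval=0.01).state)

On the error path, this is the same mechanism that surfaces the "A specified parameter was not correct: fileType" fault raised by CopyVirtualDisk_Task elsewhere in this log.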
[ 1783.895520] env[67424]: DEBUG nova.compute.claims [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1783.895689] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.895899] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.914419] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1783.965439] env[67424]: DEBUG oslo_vmware.rw_handles [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1784.026486] env[67424]: DEBUG oslo_vmware.rw_handles [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1784.026703] env[67424]: DEBUG oslo_vmware.rw_handles [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1784.134047] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8590374-7fbf-4472-bd9f-ad23d3b97b53 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.141046] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717d0c10-d47e-4845-b188-69527a1aff5f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.170350] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8f1ad1-95c3-4a53-ac9b-88ea86653904 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.177193] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879bc1e3-8eec-4e34-a89b-e2e368b30410 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.189846] env[67424]: DEBUG nova.compute.provider_tree [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.198168] env[67424]: DEBUG nova.scheduler.client.report [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1784.211187] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.315s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.211655] env[67424]: ERROR nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1784.211655] env[67424]: Faults: ['InvalidArgument'] [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Traceback (most recent call last): [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1784.211655] env[67424]: 
ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self.driver.spawn(context, instance, image_meta, [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self._fetch_image_if_missing(context, vi) [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] image_cache(vi, tmp_image_ds_loc) [ 1784.211655] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] vm_util.copy_virtual_disk( [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] session._wait_for_task(vmdk_copy_task) [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return self.wait_for_task(task_ref) [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return evt.wait() [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] result = hub.switch() [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] return self.greenlet.switch() [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1784.212086] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] self.f(*self.args, **self.kw) [ 1784.212456] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1784.212456] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] raise exceptions.translate_fault(task_info.error) [ 1784.212456] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1784.212456] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Faults: ['InvalidArgument'] [ 1784.212456] env[67424]: ERROR nova.compute.manager [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] [ 1784.212456] env[67424]: DEBUG nova.compute.utils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1784.213826] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Build of instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 was re-scheduled: A specified parameter was not correct: fileType [ 1784.213826] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1784.214198] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1784.214367] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
[ 1784.214548] env[67424]: DEBUG nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1784.214793] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1784.495100] env[67424]: DEBUG nova.network.neutron [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.507234] env[67424]: INFO nova.compute.manager [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Took 0.29 seconds to deallocate network for instance. [ 1784.605115] env[67424]: INFO nova.scheduler.client.report [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted allocations for instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 [ 1784.624652] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0f400894-b908-4aee-9fee-5f7089957e86 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 675.802s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.625786] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 480.407s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.626017] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.626227] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.626459] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.628451] env[67424]: INFO nova.compute.manager [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Terminating instance [ 1784.630136] env[67424]: DEBUG nova.compute.manager [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1784.630333] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1784.630801] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-933a4a03-cec1-450e-9177-c038f71b0cd3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.641402] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75ee38e-83da-40fa-9142-37bb6b3d0505 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.652170] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1784.674253] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c3e649d-52e8-4c3d-9f0b-19077db44543 could not be found. [ 1784.674457] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1784.674634] env[67424]: INFO nova.compute.manager [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Took 0.04 seconds to destroy the instance on the hypervisor.
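The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines throughout this log are emitted by the inner wrapper referenced in each {{(pid=...) inner .../lockutils.py:...}} tag. A minimal sketch of how code takes these named locks, assuming oslo.concurrency is installed; the function bodies are placeholders, not Nova's actual logic:

```python
# Sketch of the named-lock pattern behind the lockutils lines above.
from oslo_concurrency import lockutils

# Decorator form: the wrapped call runs with the per-instance "-events"
# lock held, serializing it against concurrent event pop/clear operations.
@lockutils.synchronized('4c3e649d-52e8-4c3d-9f0b-19077db44543-events')
def _clear_events():
    pass  # placeholder critical section

# Context-manager form, using the same lock name the resource tracker
# uses when claiming resources.
with lockutils.lock('compute_resources'):
    pass  # placeholder critical section

_clear_events()
```

The long "waited 480.407s" on the terminate lock above shows why these timings are logged: the delete request queued behind the (slow, eventually failed) build holding the same per-instance lock.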
[ 1784.674880] env[67424]: DEBUG oslo.service.loopingcall [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.675121] env[67424]: DEBUG nova.compute.manager [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1784.675219] env[67424]: DEBUG nova.network.neutron [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1784.698417] env[67424]: DEBUG nova.network.neutron [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.700361] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.700583] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.702012] env[67424]: INFO nova.compute.claims [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1784.706316] env[67424]: INFO nova.compute.manager [-] [instance: 4c3e649d-52e8-4c3d-9f0b-19077db44543] Took 0.03 seconds to deallocate network for instance.
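The inventory record logged repeatedly above ("Inventory has not changed for provider ... based on inventory data: ...") maps directly to usable capacity in Placement: capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick worked example using the exact figures from this log:

```python
# Worked example: usable capacity implied by the inventory dict logged
# for provider b21acede-6243-4c82-934a-a3956380220f (figures from the log).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 126,
                'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Placement's effective capacity formula.
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: capacity={capacity}, max single allocation={inv['max_unit']}")

# Output:
#   VCPU: capacity=192, max single allocation=16        (48 * 4.0 overcommit)
#   MEMORY_MB: capacity=196078, max single allocation=65530
#   DISK_GB: capacity=400, max single allocation=126
```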
[ 1784.791836] env[67424]: DEBUG oslo_concurrency.lockutils [None req-711fd911-8649-42a8-9a27-ce673ae7202e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "4c3e649d-52e8-4c3d-9f0b-19077db44543" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.166s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.878192] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f5ebf9-225d-41a3-8885-c70cf87a9ce7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.885710] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d26b37d-336f-43b7-a6b0-fbccff809882 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.916876] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef38a4f-5c37-47bc-9c5b-37c8d9db96bc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.923713] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae134b6-2e7e-4c76-9564-cf5d103f439e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.936916] env[67424]: DEBUG nova.compute.provider_tree [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1784.945635] env[67424]: DEBUG nova.scheduler.client.report [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1784.958232] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.958684] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Start building networks asynchronously for instance.
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1784.991761] env[67424]: DEBUG nova.compute.utils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1784.993367] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1784.993537] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1785.005535] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1785.048708] env[67424]: DEBUG nova.policy [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b65c4e3396554092b2d2227443e1566e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1175d0fb2c454022bcc36081c9df063d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1785.074700] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1785.100191] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.100450] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.100607] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.100785] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.100931] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.101089] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.101302] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.101461] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.101631] 
env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.101802] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.101975] env[67424]: DEBUG nova.virt.hardware [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.102868] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46817a4-c6c3-4283-9024-45565a0d93dc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.110993] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8076e6-97b0-4255-9a8d-74111ebafdf2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.460615] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Successfully created port: 06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1786.427079] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Successfully updated port: 06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1786.439234] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.439390] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.439538] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1786.525062] env[67424]: DEBUG 
nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1786.638155] env[67424]: DEBUG nova.compute.manager [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Received event network-vif-plugged-06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1786.638451] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Acquiring lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.638742] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.638943] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.639337] env[67424]: DEBUG nova.compute.manager [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] No waiting events found dispatching network-vif-plugged-06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1786.639526] env[67424]: WARNING nova.compute.manager [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Received unexpected event network-vif-plugged-06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 for instance with vm_state building and task_state spawning. [ 1786.639689] env[67424]: DEBUG nova.compute.manager [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Received event network-changed-06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1786.639844] env[67424]: DEBUG nova.compute.manager [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Refreshing instance network info cache due to event network-changed-06a9d4e8-39b2-4c6a-9c67-f19ba72172b5.
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1786.640023] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Acquiring lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.788998] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Updating instance_info_cache with network_info: [{"id": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "address": "fa:16:3e:fd:32:49", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a9d4e8-39", "ovs_interfaceid": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.802267] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.802565] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance network_info: |[{"id": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "address": "fa:16:3e:fd:32:49", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", 
"segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a9d4e8-39", "ovs_interfaceid": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1786.802888] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Acquired lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.803137] env[67424]: DEBUG nova.network.neutron [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Refreshing network info cache for port 06a9d4e8-39b2-4c6a-9c67-f19ba72172b5 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1786.804200] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:32:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06a9d4e8-39b2-4c6a-9c67-f19ba72172b5', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.812362] env[67424]: DEBUG oslo.service.loopingcall [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.813126] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1786.815284] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74961d73-c846-4cbd-9b3a-7e67cfcd21ab {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.835451] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.835451] env[67424]: value = "task-3200063" [ 1786.835451] env[67424]: _type = "Task" [ 1786.835451] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.843231] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200063, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.076834] env[67424]: DEBUG nova.network.neutron [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Updated VIF entry in instance network info cache for port 06a9d4e8-39b2-4c6a-9c67-f19ba72172b5. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1787.077220] env[67424]: DEBUG nova.network.neutron [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Updating instance_info_cache with network_info: [{"id": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "address": "fa:16:3e:fd:32:49", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06a9d4e8-39", "ovs_interfaceid": "06a9d4e8-39b2-4c6a-9c67-f19ba72172b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.087375] env[67424]: DEBUG oslo_concurrency.lockutils [req-819741bf-c8f2-4a25-9584-493213db7f87 req-5e9e5d85-bd7d-4648-ae45-7e5dc80fcf34 service nova] Releasing lock "refresh_cache-08d3abb5-1041-4dd2-a0e3-af33e7c2194f" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.344999] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200063, 'name': CreateVM_Task, 'duration_secs': 0.274767} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.345179] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1787.345850] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.346029] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.346353] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1787.346887] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df4ec61-39ef-426b-97b9-f258171d248a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.351042] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 1787.351042] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5267e09f-615a-9d31-e645-ba6bb04a4eaf" [ 1787.351042] env[67424]: _type = "Task" [ 1787.351042] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.358430] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5267e09f-615a-9d31-e645-ba6bb04a4eaf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.861019] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.861935] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.861935] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.390578] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.390880] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1816.388228] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.396629] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.387850] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.388158] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1821.389268] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.388058] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.418588] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.418858] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.418986] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.419155] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1822.420330] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1078ab1-e1b4-4b67-9cfd-1987d237143e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.429017] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4946651e-41fc-490b-81fc-9a98575b3e60 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.443978] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3c6df7-00ac-4ee6-8b38-ccc1651d4709 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.451742] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40c2994-d8e9-48a2-8714-c1e0201aa50d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.480199] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181016MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1822.480375] env[67424]: DEBUG 
oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.480553] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.624354] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.624547] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.624754] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.624973] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.625211] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.625418] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.625626] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.625783] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.625908] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.626040] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.637086] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1822.637303] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1822.637447] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1822.761643] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3a3a8e-baf9-485f-b47d-58197f5b122a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.768968] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d420a4a5-8a5a-456e-ac3f-6886a1d82ad4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.800243] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981eef80-9ca2-4e95-a054-dab1cd8a2fd0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.807183] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c8b886-f318-4519-ad5e-f11e153cf652 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.819944] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.828757] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1822.842062] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1822.842244] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.362s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.837566] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.838067] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1824.387539] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1824.388397] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1824.388397] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1824.396578] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1825.397649] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.388258] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.388683] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1827.388683] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1827.408678] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.408855] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.408970] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409108] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409232] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409355] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409474] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409589] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409702] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409903] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1827.409971] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1832.406071] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.738816] env[67424]: WARNING oslo_vmware.rw_handles [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1832.738816] env[67424]: ERROR oslo_vmware.rw_handles [ 1832.738816] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1832.740244] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Caching image 
{{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1832.740244] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Copying Virtual Disk [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/04d8cab3-7c5c-4e63-8163-93888cf1f426/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1832.740244] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e007ae2-28ac-40ed-9a99-08362c6443c1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.748328] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){
[ 1832.748328] env[67424]:     value = "task-3200064"
[ 1832.748328] env[67424]:     _type = "Task"
[ 1832.748328] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.756626] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.258244] env[67424]: DEBUG oslo_vmware.exceptions [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1833.258569] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.259140] env[67424]: ERROR nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1833.259140] env[67424]: Faults: ['InvalidArgument'] [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Traceback (most recent call last): [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] yield resources [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self.driver.spawn(context, instance, image_meta, [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self._fetch_image_if_missing(context, vi) [ 1833.259140] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] image_cache(vi, tmp_image_ds_loc) [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] vm_util.copy_virtual_disk( [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] session._wait_for_task(vmdk_copy_task) [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return self.wait_for_task(task_ref) [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return evt.wait() [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] result = hub.switch() [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1833.259567] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return self.greenlet.switch() [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self.f(*self.args, **self.kw) [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] raise exceptions.translate_fault(task_info.error) [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Faults: ['InvalidArgument'] [ 1833.260033] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] [ 1833.260033] env[67424]: INFO nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Terminating instance [ 1833.261107] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.261319] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1833.261556] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c3434e8-a61e-46ec-83d6-f2d87f609e47 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.263774] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1833.263971] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1833.264676] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632ba6f6-0f9d-4a24-b6d4-76072a2000ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.271355] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1833.271547] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b59ab96-2a4c-413c-b1bb-0927e4b7c71c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.273666] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1833.273842] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1833.274750] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b8eafa2-06fa-4b9e-a8f9-f2eba8d57108 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.279210] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){
[ 1833.279210] env[67424]:     value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]520d0a79-f659-7c5e-5bc9-f176ba80b900"
[ 1833.279210] env[67424]:     _type = "Task"
[ 1833.279210] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.286119] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]520d0a79-f659-7c5e-5bc9-f176ba80b900, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.342687] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1833.342914] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1833.343110] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleting the datastore file [datastore2] 31acf58b-8133-48e3-b942-2aa49a9cea6b {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.343378] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fabf9f9-a035-450a-a858-fdfe307c6d5b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.349339] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){
[ 1833.349339] env[67424]:     value = "task-3200066"
[ 1833.349339] env[67424]:     _type = "Task"
[ 1833.349339] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.357615] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200066, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.789038] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1833.789328] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1833.789541] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97dabc2e-a22d-4292-b2a6-79be45b231ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.801246] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1833.801447] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Fetch image to [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1833.801614] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1833.802373] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985b7518-88b4-4c5f-9ff1-17e33a57dc48 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.809027] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd12690-b732-49c4-8587-ed3e32e650f0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.817824] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02b9326-ee94-4e80-82d6-bb969e605fa1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.848460] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4a34cf82-4f7a-45da-9804-0283d9641bd1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.859781] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ed0df9b4-b80f-4aee-8545-b1c558b745be {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.861465] env[67424]: DEBUG oslo_vmware.api [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074619} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.861708] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.861886] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1833.862069] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1833.862243] env[67424]: INFO nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Took 0.60 seconds to destroy the instance on the hypervisor. 
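[editor's note] The CopyVirtualDisk_Task, SearchDatastore_Task and DeleteDatastoreFile_Task exchanges above all follow the same oslo.vmware pattern: invoke the vCenter method, then poll the returned task object until it reports success or error. A minimal sketch of that polling loop under stated assumptions (the TaskError class, the get_task_info callable and the fake task states are illustrative stand-ins, not oslo.vmware's actual API):

    import time

    class TaskError(Exception):
        """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves its transient states; this mirrors the
        # "progress is 0%" lines above followed by "completed successfully".
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # oslo.vmware translates the server fault before raising; the
                # "Fault InvalidArgument not matched" line above is that
                # translation falling back to a generic exception class.
                raise TaskError(info['error'])
            time.sleep(interval)

    # Fake task that succeeds on the third poll:
    states = iter([{'state': 'running'}, {'state': 'running'},
                   {'state': 'success', 'result': 'task-3200066'}])
    print(wait_for_task(lambda: next(states), interval=0))
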
[ 1833.864306] env[67424]: DEBUG nova.compute.claims [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1833.864461] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.865112] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.886563] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1833.941361] env[67424]: DEBUG oslo_vmware.rw_handles [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1834.006397] env[67424]: DEBUG oslo_vmware.rw_handles [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1834.006555] env[67424]: DEBUG oslo_vmware.rw_handles [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1834.103789] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dcf24a-5b17-4abe-b0a9-1b1ce99ab297 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.111599] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f2b681-870a-4f0e-9519-b2afe86a51ea {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.141955] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e00dfa-4ada-4b83-b49b-9445c9299ac7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.148395] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21265d1f-09a7-46a9-ace8-148b8078ff73 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.160882] env[67424]: DEBUG nova.compute.provider_tree [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.169346] env[67424]: DEBUG nova.scheduler.client.report [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1834.182434] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.318s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.182988] env[67424]: ERROR nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1834.182988] env[67424]: Faults: ['InvalidArgument'] [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Traceback (most recent call last): [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 
31acf58b-8133-48e3-b942-2aa49a9cea6b] self.driver.spawn(context, instance, image_meta, [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self._fetch_image_if_missing(context, vi) [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] image_cache(vi, tmp_image_ds_loc) [ 1834.182988] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] vm_util.copy_virtual_disk( [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] session._wait_for_task(vmdk_copy_task) [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return self.wait_for_task(task_ref) [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return evt.wait() [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] result = hub.switch() [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] return self.greenlet.switch() [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1834.183330] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] self.f(*self.args, **self.kw) [ 1834.183771] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1834.183771] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] raise exceptions.translate_fault(task_info.error) [ 1834.183771] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1834.183771] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Faults: ['InvalidArgument'] [ 1834.183771] env[67424]: ERROR nova.compute.manager [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] [ 1834.183771] env[67424]: DEBUG nova.compute.utils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1834.184956] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Build of instance 31acf58b-8133-48e3-b942-2aa49a9cea6b was re-scheduled: A specified parameter was not correct: fileType [ 1834.184956] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1834.185348] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1834.185556] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1834.185755] env[67424]: DEBUG nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1834.185919] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1834.443143] env[67424]: DEBUG nova.network.neutron [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.454233] env[67424]: INFO nova.compute.manager [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Took 0.27 seconds to deallocate network for instance. [ 1834.544500] env[67424]: INFO nova.scheduler.client.report [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted allocations for instance 31acf58b-8133-48e3-b942-2aa49a9cea6b [ 1834.568398] env[67424]: DEBUG oslo_concurrency.lockutils [None req-639ecc13-007f-4ed6-997f-6a49b7fe0652 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 554.979s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.570241] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 359.056s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.570479] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.570734] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1834.570913] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.573024] env[67424]: INFO nova.compute.manager [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Terminating instance [ 1834.574693] env[67424]: DEBUG nova.compute.manager [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1834.574833] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1834.575351] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c58d9c52-ab0d-4dfb-bc39-455e2e551d92 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.579964] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1834.587023] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17767934-30ff-409f-9493-c4b90f270ceb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.617380] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31acf58b-8133-48e3-b942-2aa49a9cea6b could not be found. [ 1834.617595] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1834.617775] env[67424]: INFO nova.compute.manager [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Took 0.04 seconds to destroy the instance on the hypervisor. 
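[editor's note] The second terminate above races with a cleanup that already removed the VM after the failed build, so the driver treats "not found" from the backend as a successful destroy and carries on with network deallocation. A minimal sketch of that defensive pattern, assuming a lookup callable; the class and function names are illustrative, not Nova's actual signatures:

    class InstanceNotFound(Exception):
        """Illustrative stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(lookup_vm, uuid):
        # A terminate request may arrive after the VM was already
        # unregistered, so a missing backend VM is logged as a warning
        # and the destroy still counts as successful.
        try:
            vm_ref = lookup_vm(uuid)
        except InstanceNotFound:
            print("WARNING: instance %s does not exist on backend" % uuid)
            return  # nothing left to unregister or delete
        # ... otherwise unregister vm_ref and delete its datastore directory ...

    def lookup_vm(uuid):
        raise InstanceNotFound(uuid)  # simulate the race seen above

    destroy_instance(lookup_vm, "31acf58b-8133-48e3-b942-2aa49a9cea6b")
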
[ 1834.618026] env[67424]: DEBUG oslo.service.loopingcall [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.622374] env[67424]: DEBUG nova.compute.manager [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1834.622478] env[67424]: DEBUG nova.network.neutron [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1834.635119] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.635352] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.636776] env[67424]: INFO nova.compute.claims [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1834.646620] env[67424]: DEBUG nova.network.neutron [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.656640] env[67424]: INFO nova.compute.manager [-] [instance: 31acf58b-8133-48e3-b942-2aa49a9cea6b] Took 0.03 seconds to deallocate network for instance. 
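[editor's note] The instance_claim above runs under the "compute_resources" lock so that concurrent builds cannot both claim the same headroom on the node. A minimal sketch of that serialized test-and-record step, assuming simple dict-free counters; the attribute names are illustrative, not the ResourceTracker's real fields:

    import threading

    class ResourceTracker:
        def __init__(self, total_mb):
            self._lock = threading.Lock()
            self.total_mb = total_mb
            self.used_mb = 0

        def instance_claim(self, flavor_mb):
            # Serialize test-and-set on the usage counters, like the
            # 'Lock "compute_resources" acquired by ... instance_claim'
            # lines above.
            with self._lock:
                if self.used_mb + flavor_mb > self.total_mb:
                    raise RuntimeError("insufficient memory for claim")
                self.used_mb += flavor_mb

    rt = ResourceTracker(total_mb=196590)
    rt.instance_claim(flavor_mb=128)  # m1.nano, per the flavor dump below
    print(rt.used_mb, "MB claimed")
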
[ 1834.744182] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f0ec2821-2d85-4dc6-bf1d-f726dabe1674 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "31acf58b-8133-48e3-b942-2aa49a9cea6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.805403] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3669e529-4edb-49b2-b5d3-b4ca50346313 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.813057] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1ae9d8-ae06-4167-95b7-fd3da64ebe5b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.842723] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7867fa8e-e70e-4490-bb10-3fc65c2a9a1b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.849629] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee7221b-898b-4c1c-aa2e-43d121da579b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.863890] env[67424]: DEBUG nova.compute.provider_tree [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.873121] env[67424]: DEBUG nova.scheduler.client.report [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1834.886828] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.887359] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Start building networks asynchronously for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1834.922518] env[67424]: DEBUG nova.compute.utils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1834.923728] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1834.923902] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1834.933269] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1834.998572] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1835.026147] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1835.027452] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1835.027452] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.027452] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1835.027452] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.027452] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1835.027683] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1835.027683] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1835.027683] env[67424]: DEBUG nova.virt.hardware [None 
req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1835.027683] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1835.027823] env[67424]: DEBUG nova.virt.hardware [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1835.028691] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df05142-3f52-484c-a48d-a42b2bee2bf4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.032520] env[67424]: DEBUG nova.policy [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92130ff41b244634a170d38f14a7248a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37696549d9e343acb36d6e89f75713d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1835.040187] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6763b4-5954-4287-966a-40431d2d5ac7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.703649] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Successfully created port: 1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1836.478780] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Successfully updated port: 1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1836.484348] env[67424]: DEBUG nova.compute.manager [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Received event network-vif-plugged-1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1836.484580] env[67424]: DEBUG oslo_concurrency.lockutils [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] Acquiring 
lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.484776] env[67424]: DEBUG oslo_concurrency.lockutils [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.484945] env[67424]: DEBUG oslo_concurrency.lockutils [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.485140] env[67424]: DEBUG nova.compute.manager [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] No waiting events found dispatching network-vif-plugged-1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1836.485340] env[67424]: WARNING nova.compute.manager [req-f954ac6b-b3e6-457e-8782-3670f0328b74 req-7d089a2e-25df-44d2-abc9-82cdf2369759 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Received unexpected event network-vif-plugged-1625a585-3ecd-4f84-9829-e6a26ffb53cd for instance with vm_state building and task_state spawning. [ 1836.489721] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.489721] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.489840] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1836.532776] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1836.809027] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Updating instance_info_cache with network_info: [{"id": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "address": "fa:16:3e:27:90:2f", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1625a585-3e", "ovs_interfaceid": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.820956] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Releasing lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.821254] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance network_info: |[{"id": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "address": "fa:16:3e:27:90:2f", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1625a585-3e", "ovs_interfaceid": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1836.821640] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:90:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '233536d0-6913-4879-8442-42dcf1d4ecbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1625a585-3ecd-4f84-9829-e6a26ffb53cd', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1836.829059] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Creating folder: Project (37696549d9e343acb36d6e89f75713d3). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1836.829531] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1374d389-b891-4c1d-b414-0c21bfe9e576 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.839469] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Created folder: Project (37696549d9e343acb36d6e89f75713d3) in parent group-v639843. [ 1836.839649] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Creating folder: Instances. Parent ref: group-v639938. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1836.839859] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef99ac62-c358-495d-a36c-1297980cb89f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.847836] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Created folder: Instances in parent group-v639938. [ 1836.848082] env[67424]: DEBUG oslo.service.loopingcall [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.848265] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1836.848517] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e488502-deca-4a9e-a16c-49c6ed6c8bfe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.866026] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1836.866026] env[67424]: value = "task-3200069" [ 1836.866026] env[67424]: _type = "Task" [ 1836.866026] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.872931] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200069, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.375183] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200069, 'name': CreateVM_Task, 'duration_secs': 0.272429} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.375347] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1837.376013] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.376190] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.376514] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1837.376754] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b512eebb-958a-4ada-950b-51d6ad7fe519 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.380902] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for the task: (returnval){ [ 1837.380902] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b13229-ea36-66c6-196d-75d4e9c599d7" [ 1837.380902] env[67424]: _type = "Task" [ 1837.380902] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.389376] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b13229-ea36-66c6-196d-75d4e9c599d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.891426] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.891747] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1837.891926] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.546687] env[67424]: DEBUG nova.compute.manager [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Received event network-changed-1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1838.546897] env[67424]: DEBUG nova.compute.manager [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Refreshing instance network info cache due to event network-changed-1625a585-3ecd-4f84-9829-e6a26ffb53cd. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1838.547122] env[67424]: DEBUG oslo_concurrency.lockutils [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] Acquiring lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.547297] env[67424]: DEBUG oslo_concurrency.lockutils [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] Acquired lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.547472] env[67424]: DEBUG nova.network.neutron [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Refreshing network info cache for port 1625a585-3ecd-4f84-9829-e6a26ffb53cd {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1838.884879] env[67424]: DEBUG nova.network.neutron [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Updated VIF entry in instance network info cache for port 1625a585-3ecd-4f84-9829-e6a26ffb53cd. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1838.885267] env[67424]: DEBUG nova.network.neutron [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Updating instance_info_cache with network_info: [{"id": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "address": "fa:16:3e:27:90:2f", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1625a585-3e", "ovs_interfaceid": "1625a585-3ecd-4f84-9829-e6a26ffb53cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.894277] env[67424]: DEBUG oslo_concurrency.lockutils [req-42397539-76ff-4270-8b84-9dec7fb1821e req-d12f4805-dccc-4a44-be3e-33f9a0a10f59 service nova] Releasing lock "refresh_cache-8e178dab-b6bb-4e29-bac9-64ab2b925762" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.702623] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1846.724631] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){ [ 1846.724631] env[67424]: value = "domain-c8" [ 1846.724631] env[67424]: _type = "ClusterComputeResource" [ 1846.724631] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1846.725964] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01967e98-5dcb-4e14-84c3-aa6fc80fbfd2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.743499] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 10 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1846.743656] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.743839] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid ea1ca448-0e06-4548-80cd-9107b43eefe4 {{(pid=67424) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.743996] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid f98aafb6-c8e3-44fd-b942-06e4b6bbc52a {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744165] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid afd0f239-0752-4e2e-a232-9f22722753f5 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744315] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 72d5f322-47e3-402e-abcc-1b5b0497bc1f {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744463] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 7e457262-ef1d-469e-8c36-b0f341a00e9a {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744607] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid a909e0f2-5717-469f-83f2-4b07f03e2ff6 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744778] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.744890] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 08d3abb5-1041-4dd2-a0e3-af33e7c2194f {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.745039] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 8e178dab-b6bb-4e29-bac9-64ab2b925762 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1846.745355] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.745574] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.745768] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.745959] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "afd0f239-0752-4e2e-a232-9f22722753f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.746163] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.746356] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.746543] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.746733] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.746920] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.747124] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.761357] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "461f92ee-b076-4cb7-8170-66cddb898b99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.761649] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.916815] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.432827] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1878.228303] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.247349] env[67424]: WARNING oslo_vmware.rw_handles [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1881.247349] env[67424]: ERROR oslo_vmware.rw_handles [ 1881.248092] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1881.249841] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1881.250080] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 
tempest-AttachInterfacesTestJSON-1868380497-project-member] Copying Virtual Disk [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/f1b24a30-7b3f-411d-af30-d0eede77946e/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1881.250370] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50d4661c-3e0a-40d6-b4c2-0f29b511852f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.258360] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 1881.258360] env[67424]: value = "task-3200070" [ 1881.258360] env[67424]: _type = "Task" [ 1881.258360] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.265955] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.387589] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1881.387774] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1881.768487] env[67424]: DEBUG oslo_vmware.exceptions [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1881.768776] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.769387] env[67424]: ERROR nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1881.769387] env[67424]: Faults: ['InvalidArgument'] [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Traceback (most recent call last): [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] yield resources [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self.driver.spawn(context, instance, image_meta, [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self._fetch_image_if_missing(context, vi) [ 1881.769387] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] image_cache(vi, tmp_image_ds_loc) [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] vm_util.copy_virtual_disk( [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] session._wait_for_task(vmdk_copy_task) [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return self.wait_for_task(task_ref) [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return evt.wait() [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] result = hub.switch() [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1881.770370] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return self.greenlet.switch() [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self.f(*self.args, **self.kw) [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] raise exceptions.translate_fault(task_info.error) [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Faults: ['InvalidArgument'] [ 1881.770842] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] [ 1881.770842] env[67424]: INFO nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Terminating instance [ 1881.771264] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.771468] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1881.771728] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a906c3a2-1006-4ffe-b760-559167d83567 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.773891] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1881.774093] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1881.774801] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb39a6c-aae1-40ed-ba8b-88e3d4bd4760 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.781430] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1881.781656] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1ce1b86-b0b0-4f12-b164-4c01975346f7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.783717] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1881.783890] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1881.784799] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-400ebdca-24a1-420d-9bd1-154d7a4fcbbc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.789477] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for the task: (returnval){ [ 1881.789477] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52008be7-41ef-d7f4-75f9-394ee29dc4f5" [ 1881.789477] env[67424]: _type = "Task" [ 1881.789477] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.796362] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52008be7-41ef-d7f4-75f9-394ee29dc4f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.966494] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1881.966854] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1881.967132] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleting the datastore file [datastore2] 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1881.967539] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bac1924-4dcb-4d78-82cd-de183a879261 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.974122] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 1881.974122] env[67424]: value = "task-3200072" [ 1881.974122] env[67424]: _type = "Task" [ 1881.974122] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.981735] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.299489] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1882.299857] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Creating directory with path [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.300019] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86034d13-c081-4a9c-9658-e33199ab40fb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.315389] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Created directory with path [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.315681] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Fetch image to [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1882.315794] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1882.316572] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5d2278-d927-4d24-a88b-7ad4251aee4a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.323580] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2f3aa8-b039-44e8-b781-48b29bfe7224 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.332492] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125ee5c2-8636-46e0-be3e-0d449a4633e5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.373607] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8b137b32-13b4-4640-bad5-efb7c6c8c719 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.379399] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bfe3983b-5ae3-4a7e-a835-4c7097035664 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.401202] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1882.483431] env[67424]: DEBUG oslo_vmware.api [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100549} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.483682] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1882.483864] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1882.484047] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1882.484225] env[67424]: INFO nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Took 0.71 seconds to destroy the instance on the hypervisor. 
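The destroy sequence above (VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task, then "Deleted the datastore file") follows the invoke-then-wait pattern visible throughout this log: a vSphere `*_Task` method returns a task moref immediately, and the caller blocks in `wait_for_task` while `_poll_task` emits the "progress is 0%" / "completed successfully" lines. A minimal sketch of that pattern is below; the host, credentials, and datacenter moref are placeholders rather than values from this log, while the oslo.vmware calls themselves (`VMwareAPISession`, `invoke_api`, `wait_for_task`) are the real API.

```python
# Sketch of the invoke-then-wait pattern seen in the log entries above.
# Placeholder vCenter endpoint and credentials; not taken from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',          # placeholder vCenter
    api_retry_count=10, task_poll_interval=0.5)   # polling cadence

file_manager = session.vim.service_content.fileManager
dc_ref = ...  # datacenter moref, resolved elsewhere (placeholder)

# DeleteDatastoreFile_Task returns a task moref right away; the actual
# deletion happens asynchronously on the vCenter side.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f',
    datacenter=dc_ref)

# wait_for_task polls TaskInfo (the _poll_task debug lines above) and
# returns the completed task info, or raises a translated fault on error.
task_info = session.wait_for_task(task)
```

Nova's `ds_util.file_delete` (the `file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py` frames above) wraps essentially this call pair, which is why each datastore operation in the log appears as one "Invoking ..." line followed by one or more `_poll_task` progress lines.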
[ 1882.486278] env[67424]: DEBUG nova.compute.claims [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1882.486454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.486678] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.551117] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1882.615502] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1882.615707] env[67424]: DEBUG oslo_vmware.rw_handles [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} 
[ 1882.711525] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ca5b4d-7fbf-44f4-b7eb-74c6882710ee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1882.718981] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80e2c45-a082-4814-ad14-8b22dd3da669 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1882.748933] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a791a664-2624-4331-9794-98abb2cb2597 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1882.755884] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d341f581-338e-49dd-839a-e049a077e27a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1882.768537] env[67424]: DEBUG nova.compute.provider_tree [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
[ 1882.777096] env[67424]: DEBUG nova.scheduler.client.report [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} 
[ 1882.793383] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType 
[ 1882.793916] env[67424]: Faults: ['InvalidArgument'] 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Traceback (most recent call last): 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self.driver.spawn(context, instance, image_meta, 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self._vmops.spawn(context, instance, image_meta, injected_files, 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self._fetch_image_if_missing(context, vi) 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] image_cache(vi, tmp_image_ds_loc) 
[ 1882.793916] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] vm_util.copy_virtual_disk( 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] session._wait_for_task(vmdk_copy_task) 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return self.wait_for_task(task_ref) 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return evt.wait() 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] result = hub.switch() 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] return self.greenlet.switch() 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner 
[ 1882.794354] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] self.f(*self.args, **self.kw) 
[ 1882.794764] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1882.794764] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] raise exceptions.translate_fault(task_info.error) 
[ 1882.794764] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType 
[ 1882.794764] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Faults: ['InvalidArgument'] 
[ 1882.794764] env[67424]: ERROR nova.compute.manager [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] 
[ 1882.794764] env[67424]: DEBUG nova.compute.utils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} 
[ 1882.796031] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Build of instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f was re-scheduled: A specified parameter was not correct: fileType 
[ 1882.796031] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} 
[ 1882.796945] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} 
[ 1882.797160] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1882.797374] env[67424]: DEBUG nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1882.797546] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1883.143383] env[67424]: DEBUG nova.network.neutron [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.156337] env[67424]: INFO nova.compute.manager [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Took 0.36 seconds to deallocate network for instance. [ 1883.247074] env[67424]: INFO nova.scheduler.client.report [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleted allocations for instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f [ 1883.273012] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9920d8c7-b427-4a12-a3d0-a46c436823f7 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.269s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.274718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 399.463s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.274718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.274718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.275015] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.277134] env[67424]: INFO nova.compute.manager [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Terminating instance [ 1883.278885] env[67424]: DEBUG nova.compute.manager [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1883.279098] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1883.279936] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aaa25567-7345-453f-a175-e79669aecd19 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.291256] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90838e06-a96e-4874-afac-9d056fe535a7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.302793] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1883.325679] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f could not be found. 
[ 1883.325950] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1883.326191] env[67424]: INFO nova.compute.manager [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1883.326484] env[67424]: DEBUG oslo.service.loopingcall [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.326741] env[67424]: DEBUG nova.compute.manager [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1883.326846] env[67424]: DEBUG nova.network.neutron [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1883.354391] env[67424]: DEBUG nova.network.neutron [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.357833] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.358072] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.359505] env[67424]: INFO nova.compute.claims [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1883.362576] env[67424]: INFO nova.compute.manager [-] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] Took 0.04 seconds to deallocate network for instance. 
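The "Waiting for function ... to return" line above comes from oslo.service's looping-call machinery, which re-invokes a function until it signals completion. A self-contained sketch of the generic pattern (the fixed 0.1s interval, the attempt counter, and success on the third try are illustrative assumptions, not Nova's actual retry policy):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _try_deallocate():
        attempts['n'] += 1
        if attempts['n'] >= 3:  # pretend the third attempt succeeds
            raise loopingcall.LoopingCallDone('deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    result = timer.start(interval=0.1).wait()  # blocks, returns 'deallocated'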
[ 1883.383317] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.386842] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.387980] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.454485] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fb8db201-d833-40c4-9211-85c6f6011663 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.456105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 36.710s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.456105] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 77fc0fac-ad9f-4589-9d11-0dee49a0ba2f] During sync_power_state the instance has a pending task (deleting). Skip. 
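The inventory dict that the scheduler report keeps logging as unchanged (above, and again below) fixes the schedulable capacity of the provider: placement computes capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Checking the logged numbers:

    # Values copied from the "Inventory has not changed" report lines.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, cap)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0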
[ 1883.456105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "77fc0fac-ad9f-4589-9d11-0dee49a0ba2f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.524925] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccdd1ed-7e0c-43aa-b574-02b73e7fb419 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.533061] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b825594-9f9d-44f2-a84a-670048eab904 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.563090] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ca562a-6232-4aa8-bd3e-dad676832155 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.570249] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c88c69b-6e89-4c8c-aced-f59f7eaa03f2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.582826] env[67424]: DEBUG nova.compute.provider_tree [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1883.591980] env[67424]: DEBUG nova.scheduler.client.report [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1883.605717] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.248s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.622082] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "4a4278e7-1533-4926-bfef-ce5fe3e45f8d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.622312] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "4a4278e7-1533-4926-bfef-ce5fe3e45f8d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.626835] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "4a4278e7-1533-4926-bfef-ce5fe3e45f8d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.004s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.627325] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1883.658571] env[67424]: DEBUG nova.compute.utils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1883.659853] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1883.660123] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1883.669776] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Start building block device mappings for instance. 
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1883.724562] env[67424]: DEBUG nova.policy [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16ff55b8c44a4d3fbc6df15877db801b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce359312235c49f1a44b06bd60a518e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1883.734195] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1883.760754] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1883.760992] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1883.761167] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1883.761394] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1883.761556] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1883.761709] env[67424]: DEBUG nova.virt.hardware [None 
req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1883.761915] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1883.762086] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1883.762258] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1883.762429] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1883.762599] env[67424]: DEBUG nova.virt.hardware [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1883.763528] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6174879f-4b47-4ddd-8ebf-066bdbd3dafa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.771120] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2d02ad-601a-4042-8094-a14efa0cfa6d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.100632] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Successfully created port: 24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1884.387576] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.398788] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.398999] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.399169] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.399315] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1884.400444] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107aa2f8-04eb-4c78-b1d0-e345c33388d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.409517] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375be826-33e1-4a68-9035-21bc0cb32652 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.423666] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d112d341-5a5f-4a7b-a844-2d7ac6914b81 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.430117] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480f4f0d-8079-460f-9b6d-5ed4857b9148 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.460906] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1884.461103] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.461366] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.537502] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.537659] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.537783] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.537901] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538030] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538157] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538272] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538407] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538558] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538680] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1884.538863] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1884.538992] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1884.678965] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0307eb-1f7a-48d6-9889-142a638f203c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.688487] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b242e1-310e-4b11-8743-385c0c2fb2cc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.716785] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f28ed5-04ba-43ed-93d7-6b053d051d0a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.724219] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04221ab8-5ee7-43dc-925c-eda204e71b43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.737416] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.745405] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1884.763790] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
1884.763968] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.303s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.846189] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Successfully updated port: 24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1884.855600] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.855761] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquired lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.855808] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1884.899466] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1885.102295] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Updating instance_info_cache with network_info: [{"id": "24968cdb-4f87-499e-aa5b-879a8a991264", "address": "fa:16:3e:e2:78:cf", "network": {"id": "6260d73f-f644-4a9b-9e6f-2d8a3921c396", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429259058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce359312235c49f1a44b06bd60a518e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24968cdb-4f", "ovs_interfaceid": "24968cdb-4f87-499e-aa5b-879a8a991264", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.113963] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Releasing lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.114270] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance network_info: |[{"id": "24968cdb-4f87-499e-aa5b-879a8a991264", "address": "fa:16:3e:e2:78:cf", "network": {"id": "6260d73f-f644-4a9b-9e6f-2d8a3921c396", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429259058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce359312235c49f1a44b06bd60a518e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24968cdb-4f", "ovs_interfaceid": "24968cdb-4f87-499e-aa5b-879a8a991264", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1885.114685] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:78:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24968cdb-4f87-499e-aa5b-879a8a991264', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.122495] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Creating folder: Project (ce359312235c49f1a44b06bd60a518e6). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1885.123031] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03b9f0be-5e5a-477b-9463-4bf9364b7021 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.133143] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Created folder: Project (ce359312235c49f1a44b06bd60a518e6) in parent group-v639843. [ 1885.133329] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Creating folder: Instances. Parent ref: group-v639941. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1885.133548] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f328e41c-2683-4642-82f3-db58da8f9cf4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.142282] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Created folder: Instances in parent group-v639941. [ 1885.142506] env[67424]: DEBUG oslo.service.loopingcall [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.142686] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1885.142875] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-863e66a0-e40f-4bf3-aec5-8fdc2891fc68 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.161274] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.161274] env[67424]: value = "task-3200075" [ 1885.161274] env[67424]: _type = "Task" [ 1885.161274] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.169969] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200075, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.188302] env[67424]: DEBUG nova.compute.manager [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Received event network-vif-plugged-24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1885.188443] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Acquiring lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.188663] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.188711] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.188870] env[67424]: DEBUG nova.compute.manager [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] No waiting events found dispatching network-vif-plugged-24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1885.189039] env[67424]: WARNING nova.compute.manager [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Received unexpected event network-vif-plugged-24968cdb-4f87-499e-aa5b-879a8a991264 for instance with vm_state building and task_state spawning. [ 1885.189199] env[67424]: DEBUG nova.compute.manager [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Received event network-changed-24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1885.189430] env[67424]: DEBUG nova.compute.manager [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Refreshing instance network info cache due to event network-changed-24968cdb-4f87-499e-aa5b-879a8a991264. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1885.189733] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Acquiring lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.189921] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Acquired lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.190075] env[67424]: DEBUG nova.network.neutron [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Refreshing network info cache for port 24968cdb-4f87-499e-aa5b-879a8a991264 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1885.465410] env[67424]: DEBUG nova.network.neutron [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Updated VIF entry in instance network info cache for port 24968cdb-4f87-499e-aa5b-879a8a991264. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1885.465827] env[67424]: DEBUG nova.network.neutron [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Updating instance_info_cache with network_info: [{"id": "24968cdb-4f87-499e-aa5b-879a8a991264", "address": "fa:16:3e:e2:78:cf", "network": {"id": "6260d73f-f644-4a9b-9e6f-2d8a3921c396", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-429259058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce359312235c49f1a44b06bd60a518e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24968cdb-4f", "ovs_interfaceid": "24968cdb-4f87-499e-aa5b-879a8a991264", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.475861] env[67424]: DEBUG oslo_concurrency.lockutils [req-2e0ce1e0-373a-4b7c-b0cf-d3b6101019c5 req-96190b99-7c9d-42b2-acbe-00e422fe3044 service nova] Releasing lock "refresh_cache-461f92ee-b076-4cb7-8170-66cddb898b99" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.671932] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200075, 'name': CreateVM_Task, 'duration_secs': 0.326161} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.672124] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1885.672775] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.673022] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.673280] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1885.673547] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bfaee81-ed10-4620-b950-1dd0d5ed571b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.677825] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for the task: (returnval){ [ 1885.677825] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522860ca-facc-9e3f-0a45-6e2ba639ae8e" [ 1885.677825] env[67424]: _type = "Task" [ 1885.677825] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.687259] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]522860ca-facc-9e3f-0a45-6e2ba639ae8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.763855] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1885.764095] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1886.188696] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.188696] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.188696] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.387995] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.388411] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1888.388411] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1888.411051] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411206] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411335] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411457] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411578] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411695] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.411863] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.412008] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.412136] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.412255] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1888.412373] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1894.014914] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "909d3788-23da-446f-9c47-46df54003e1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.015237] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.124272] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.365980] env[67424]: WARNING oslo_vmware.rw_handles [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1932.365980] env[67424]: ERROR oslo_vmware.rw_handles [ 1932.366664] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1932.368587] 
env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1932.368837] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Copying Virtual Disk [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/ea487dae-92db-435c-a865-f689f476c2a5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1932.369157] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98df4d4b-b127-434f-b3fd-1fd9e097453b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.377893] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for the task: (returnval){ [ 1932.377893] env[67424]: value = "task-3200076" [ 1932.377893] env[67424]: _type = "Task" [ 1932.377893] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.385984] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Task: {'id': task-3200076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.887972] env[67424]: DEBUG oslo_vmware.exceptions [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1932.888280] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.888837] env[67424]: ERROR nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1932.888837] env[67424]: Faults: ['InvalidArgument'] [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Traceback (most recent call last): [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] yield resources [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self.driver.spawn(context, instance, image_meta, [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self._fetch_image_if_missing(context, vi) [ 1932.888837] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] image_cache(vi, tmp_image_ds_loc) [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] vm_util.copy_virtual_disk( [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] session._wait_for_task(vmdk_copy_task) [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return self.wait_for_task(task_ref) [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return evt.wait() [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] result = hub.switch() [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1932.889397] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return self.greenlet.switch() [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self.f(*self.args, **self.kw) [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] raise exceptions.translate_fault(task_info.error) [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Faults: ['InvalidArgument'] [ 1932.889816] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] [ 1932.889816] env[67424]: INFO nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Terminating instance [ 1932.890846] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.891063] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.891310] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78e18bc6-4048-4aa2-86b1-1babb09e4db6 {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.893687] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1932.893880] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1932.894641] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0240c733-3a2e-4bec-8e08-67d902772b43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.901363] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1932.901601] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dca09c3-c4c3-4af7-b3c0-114464b82739 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.903727] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.903895] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1932.904842] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9854fd7d-99c0-49dc-84b3-d21c9a474d02 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.909613] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1932.909613] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c80ca0-27e9-b962-b4b0-738b6e094004" [ 1932.909613] env[67424]: _type = "Task" [ 1932.909613] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.916810] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c80ca0-27e9-b962-b4b0-738b6e094004, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.971016] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1932.971603] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1932.971819] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Deleting the datastore file [datastore2] ea1ca448-0e06-4548-80cd-9107b43eefe4 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.972694] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4e3bdd7-ff19-4a72-98a5-2fd015583d88 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.980516] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for the task: (returnval){ [ 1932.980516] env[67424]: value = "task-3200078" [ 1932.980516] env[67424]: _type = "Task" [ 1932.980516] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.988068] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Task: {'id': task-3200078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.420138] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1933.420639] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating directory with path [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1933.420639] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6d4fe4f-37dd-4a6d-89e7-52c3efefb249 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.432828] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Created directory with path [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1933.433028] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Fetch image to [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1933.433204] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1933.433922] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b6254c-9f02-4391-aa85-e07daef71a72 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.440435] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455325e3-aa13-415a-8808-1efd8ca0759e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.449258] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39260de6-c3c1-4123-96f8-a2f7c37c2543 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.479999] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee7c4c7-3a68-4daa-b8aa-529d83bd3edb {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.490729] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4131cfcc-ad4f-4269-88ce-fe28aa83a3fe {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.492443] env[67424]: DEBUG oslo_vmware.api [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Task: {'id': task-3200078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071893} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.492685] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1933.492862] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1933.493040] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1933.493220] env[67424]: INFO nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Took 0.60 seconds to destroy the instance on the hypervisor. 
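[editor's note] Every vCenter operation in the stretch above follows one shape: nova invokes an API method (Folder.CreateVM_Task, VirtualDiskManager.CopyVirtualDisk_Task, FileManager.DeleteDatastoreFile_Task), receives a task reference such as task-3200075, and oslo_vmware polls it, logging "progress is N%" until the task either completes successfully or surfaces a fault (the InvalidArgument fault above is raised from exactly this loop). The sketch below is a minimal, library-agnostic rendering of that poll loop, not oslo_vmware's actual code; get_task_info is a hypothetical stand-in for a real vSphere SDK read of the task's TaskInfo.

    import time

    # Hypothetical stand-in for a vSphere SDK call that reads TaskInfo
    # (state, progress, error) for a task reference such as "task-3200075".
    def get_task_info(task_ref):
        raise NotImplementedError("replace with a real SDK call")

    class TaskFailed(Exception):
        pass

    def wait_for_task(task_ref, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter task until it reaches a terminal state.

        Mirrors the behaviour visible in the log: periodic "progress is N%"
        polls, then either a "completed successfully" result or a fault
        (e.g. InvalidArgument) raised to the caller.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)  # e.g. {'state': ..., 'progress': ..., 'error': ...}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown fault"))
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")
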
[ 1933.495250] env[67424]: DEBUG nova.compute.claims [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1933.495445] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.495656] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.514226] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1933.630755] env[67424]: DEBUG oslo_vmware.rw_handles [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1933.692959] env[67424]: DEBUG oslo_vmware.rw_handles [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1933.693167] env[67424]: DEBUG oslo_vmware.rw_handles [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1933.736647] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80de4b35-b4d4-41ba-9e62-e906af7bc830 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.745020] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947c3500-7dfd-4dd1-9676-640827c2bbe9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.775062] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1194202-13eb-466a-89a8-1a13b1f2cd83 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.782372] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38c1c0d-8904-4cc9-860d-95b04d1577bf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.796148] env[67424]: DEBUG nova.compute.provider_tree [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.805757] env[67424]: DEBUG nova.scheduler.client.report [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1933.821748] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.326s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.822279] env[67424]: ERROR nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1933.822279] env[67424]: Faults: ['InvalidArgument'] [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Traceback (most recent call last): [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1933.822279] 
env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self.driver.spawn(context, instance, image_meta, [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self._fetch_image_if_missing(context, vi) [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] image_cache(vi, tmp_image_ds_loc) [ 1933.822279] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] vm_util.copy_virtual_disk( [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] session._wait_for_task(vmdk_copy_task) [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return self.wait_for_task(task_ref) [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return evt.wait() [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] result = hub.switch() [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] return self.greenlet.switch() [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1933.822682] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] self.f(*self.args, **self.kw) [ 1933.823037] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1933.823037] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] raise exceptions.translate_fault(task_info.error) [ 1933.823037] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1933.823037] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Faults: ['InvalidArgument'] [ 1933.823037] env[67424]: ERROR nova.compute.manager [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] [ 1933.823037] env[67424]: DEBUG nova.compute.utils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1933.824772] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Build of instance ea1ca448-0e06-4548-80cd-9107b43eefe4 was re-scheduled: A specified parameter was not correct: fileType [ 1933.824772] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1933.825158] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1933.825326] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1933.825496] env[67424]: DEBUG nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1933.825759] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1934.185811] env[67424]: DEBUG nova.network.neutron [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.198611] env[67424]: INFO nova.compute.manager [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Took 0.37 seconds to deallocate network for instance. [ 1934.289279] env[67424]: INFO nova.scheduler.client.report [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Deleted allocations for instance ea1ca448-0e06-4548-80cd-9107b43eefe4 [ 1934.313329] env[67424]: DEBUG oslo_concurrency.lockutils [None req-3e82f702-e436-4c99-a53c-1efeaf49994d tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 644.906s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.314720] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 448.272s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.314952] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Acquiring lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.315186] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.315359] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.317353] env[67424]: INFO nova.compute.manager [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Terminating instance [ 1934.319738] env[67424]: DEBUG nova.compute.manager [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1934.319934] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1934.320440] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01a2bbf2-47cb-4e90-95c0-10528434b0e5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.329699] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f400a7-e31b-40de-b590-b2854a6926e7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.339955] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1934.360362] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ea1ca448-0e06-4548-80cd-9107b43eefe4 could not be found. 
[ 1934.360580] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1934.360754] env[67424]: INFO nova.compute.manager [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1934.361085] env[67424]: DEBUG oslo.service.loopingcall [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.361359] env[67424]: DEBUG nova.compute.manager [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1934.361455] env[67424]: DEBUG nova.network.neutron [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1934.386351] env[67424]: DEBUG nova.network.neutron [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.391994] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.392241] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.393625] env[67424]: INFO nova.compute.claims [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1934.396367] env[67424]: INFO nova.compute.manager [-] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] Took 0.03 seconds to deallocate network for instance. 
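[editor's note] The lockutils records throughout this section report per-lock wait and hold times ("acquired ... waited 0.000s", "released ... held 644.906s"); note above that the build lock for ea1ca448 was held for 644.906s while the terminate request waited 448.272s behind it. Below is a minimal sketch of that instrumentation pattern as a context manager; it is not oslo.concurrency's implementation, which additionally provides fair locking, semaphores, and external file-based locks.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name):
        """Acquire a named lock while reporting wait/held durations, in the
        spirit of the lockutils lines above."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" :: held {time.monotonic() - t1:.3f}s')
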
[ 1934.495310] env[67424]: DEBUG oslo_concurrency.lockutils [None req-24765025-2576-4abc-8543-c8349930cd3c tempest-VolumesAdminNegativeTest-928234619 tempest-VolumesAdminNegativeTest-928234619-project-member] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.181s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.496634] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 87.751s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.496852] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: ea1ca448-0e06-4548-80cd-9107b43eefe4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1934.497042] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "ea1ca448-0e06-4548-80cd-9107b43eefe4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.557971] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec326ff9-4313-4691-ad64-95d95148aae1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.565364] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f005b974-0431-4938-be7d-429a77630a02 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.595704] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb28b35a-a60c-464f-a7a1-19172c8390be {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.602411] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a19b19-9588-44f7-9b72-eb455d237ebc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.615286] env[67424]: DEBUG nova.compute.provider_tree [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1934.624967] env[67424]: DEBUG nova.scheduler.client.report [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved':
0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1934.639436] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.247s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.639911] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1934.670119] env[67424]: DEBUG nova.compute.utils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1934.671362] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1934.671527] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1934.680913] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1934.736113] env[67424]: DEBUG nova.policy [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c51b8467e9a4dd7b7259edacd7f0fca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '32577de731d749ffb2939075f98687dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 1934.740456] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1934.766212] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1934.766461] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1934.766617] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1934.766795] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1934.766940] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1934.767116] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1934.767329] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1934.767490] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1934.767680] env[67424]: DEBUG 
nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1934.767859] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1934.768048] env[67424]: DEBUG nova.virt.hardware [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1934.769009] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab2d3fc-91c2-49a8-bff9-99a91aa767a6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.776893] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc32dad-15b0-4eb1-942f-825ad28dda00 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.186910] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Successfully created port: f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1935.832122] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Successfully updated port: f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1935.842744] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.842892] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.843050] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1935.885843] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d 
tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1936.248188] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Updating instance_info_cache with network_info: [{"id": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "address": "fa:16:3e:32:26:d8", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf851fa01-bf", "ovs_interfaceid": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.262341] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.262652] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance network_info: |[{"id": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "address": "fa:16:3e:32:26:d8", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf851fa01-bf", "ovs_interfaceid": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1936.263084] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:26:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f851fa01-bf12-4ed9-be4f-f52e35923b02', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1936.270651] env[67424]: DEBUG oslo.service.loopingcall [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1936.271147] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1936.271409] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dc093e2-315e-4d00-9782-d95a57f6caee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.292039] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1936.292039] env[67424]: value = "task-3200079" [ 1936.292039] env[67424]: _type = "Task" [ 1936.292039] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.299920] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200079, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.320355] env[67424]: DEBUG nova.compute.manager [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Received event network-vif-plugged-f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1936.320629] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Acquiring lock "909d3788-23da-446f-9c47-46df54003e1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.320812] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Lock "909d3788-23da-446f-9c47-46df54003e1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.320979] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Lock "909d3788-23da-446f-9c47-46df54003e1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.321154] env[67424]: DEBUG nova.compute.manager [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] No waiting events found dispatching network-vif-plugged-f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1936.321322] env[67424]: WARNING nova.compute.manager [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Received unexpected event network-vif-plugged-f851fa01-bf12-4ed9-be4f-f52e35923b02 for instance with vm_state building and task_state spawning. [ 1936.321484] env[67424]: DEBUG nova.compute.manager [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Received event network-changed-f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1936.321635] env[67424]: DEBUG nova.compute.manager [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Refreshing instance network info cache due to event network-changed-f851fa01-bf12-4ed9-be4f-f52e35923b02.
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1936.321818] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Acquiring lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.321952] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Acquired lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.322116] env[67424]: DEBUG nova.network.neutron [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Refreshing network info cache for port f851fa01-bf12-4ed9-be4f-f52e35923b02 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1936.801147] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200079, 'name': CreateVM_Task, 'duration_secs': 0.266976} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.801341] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1936.808722] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.808925] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.809212] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1936.809452] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4e70c98-7a1a-4f6b-91cf-ac84e4448be9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.811552] env[67424]: DEBUG nova.network.neutron [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Updated VIF entry in instance network info cache for port f851fa01-bf12-4ed9-be4f-f52e35923b02. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1936.811869] env[67424]: DEBUG nova.network.neutron [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Updating instance_info_cache with network_info: [{"id": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "address": "fa:16:3e:32:26:d8", "network": {"id": "5e647726-7015-4ec7-a6f2-1e9093ef77ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-421406211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32577de731d749ffb2939075f98687dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7d09e9-a3dd-4d89-b9dd-2814f5f6dd5d", "external-id": "nsx-vlan-transportzone-591", "segmentation_id": 591, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf851fa01-bf", "ovs_interfaceid": "f851fa01-bf12-4ed9-be4f-f52e35923b02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.815725] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 1936.815725] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]529a5024-816d-b2b9-9a1e-a121918f5e98" [ 1936.815725] env[67424]: _type = "Task" [ 1936.815725] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.823952] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]529a5024-816d-b2b9-9a1e-a121918f5e98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.824918] env[67424]: DEBUG oslo_concurrency.lockutils [req-81341fbd-1f8e-4df8-b65b-541a40700c17 req-34071b34-24fc-46c2-a006-31bbec3bc7f3 service nova] Releasing lock "refresh_cache-909d3788-23da-446f-9c47-46df54003e1c" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.326713] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.327142] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1937.327403] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.388086] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.387089] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.387393] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1944.383525] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1944.387205] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.388378] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.388649] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.387921] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.400307] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.400623] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.400763] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.400929] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1946.402040] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954f7e3d-a85a-4d83-ae46-2e0fa5638885 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.411245] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75d1441-4900-4cde-9047-c471e1df900a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.426578] env[67424]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5eb97c0-872a-495b-b7ed-91510f140210 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.432722] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f008cedc-249a-44d3-82f6-eed10e806f92 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.461084] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180978MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1946.461248] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.461403] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.533223] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.533418] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance afd0f239-0752-4e2e-a232-9f22722753f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.533621] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.533737] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.533876] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534040] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534197] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534350] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534496] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534641] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1946.534853] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1946.535022] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1946.651397] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a459872f-dba5-4e00-b933-a6704408935c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.658913] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7aafef-55e8-4d3a-a649-4c0082721a0d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.689467] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75af929-7c11-4ceb-9575-c3e5edd1c530 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.696861] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5084f88-05a6-4e3a-96e2-82917408dd59 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.709486] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1946.718777] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1946.734718] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1946.734718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.273s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.735346] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1948.388318] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1948.388567] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1948.388655] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1948.409237] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.409399] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.409502] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.409625] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.409743] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.409860] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.410025] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.410104] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.410224] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.410339] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1948.410485] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1956.405882] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.481160] env[67424]: WARNING oslo_vmware.rw_handles [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1983.481160] env[67424]: ERROR oslo_vmware.rw_handles [ 1983.481806] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1983.484132] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: 
f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1983.484386] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Copying Virtual Disk [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/98f78d12-2e2f-4cd6-996a-8e94dc6f802f/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1983.484668] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1f5dc3e-41c9-4dad-a0e7-2ee7a49e1d2a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.492919] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){ [ 1983.492919] env[67424]: value = "task-3200080" [ 1983.492919] env[67424]: _type = "Task" [ 1983.492919] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.501774] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.003789] env[67424]: DEBUG oslo_vmware.exceptions [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1984.004092] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1984.004610] env[67424]: ERROR nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1984.004610] env[67424]: Faults: ['InvalidArgument']
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Traceback (most recent call last):
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] yield resources
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self.driver.spawn(context, instance, image_meta,
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self._fetch_image_if_missing(context, vi)
[ 1984.004610] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] image_cache(vi, tmp_image_ds_loc)
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] vm_util.copy_virtual_disk(
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] session._wait_for_task(vmdk_copy_task)
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return self.wait_for_task(task_ref)
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return evt.wait()
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] result = hub.switch()
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1984.005148] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return self.greenlet.switch()
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self.f(*self.args, **self.kw)
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] raise exceptions.translate_fault(task_info.error)
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Faults: ['InvalidArgument']
[ 1984.005512] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a]
[ 1984.005512] env[67424]: INFO nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Terminating instance
[ 1984.006491] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1984.006707] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1984.006940] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88387872-819e-43ba-ab77-e69acfd268e1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.009064] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1984.009264] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1984.009949] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d149549-c3f4-49ab-850c-217d6c4519ff {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.016429] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1984.016647] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e714d370-d6c4-47d5-8e6d-3753c94d0e2c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.018692] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1984.018863] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1984.019761] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a37cff23-4cdf-4bd2-9dc1-407334ef2c6b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.024260] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for the task: (returnval){
[ 1984.024260] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52452254-8c4b-2121-4bb7-325f9186240e"
[ 1984.024260] env[67424]: _type = "Task"
[ 1984.024260] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1984.034147] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52452254-8c4b-2121-4bb7-325f9186240e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1984.078616] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1984.079021] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1984.079339] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleting the datastore file [datastore2] f98aafb6-c8e3-44fd-b942-06e4b6bbc52a {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1984.079724] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd306164-06ac-4eda-b66a-5ac77bcc08fc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.087579] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for the task: (returnval){
[ 1984.087579] env[67424]: value = "task-3200082"
[ 1984.087579] env[67424]: _type = "Task"
[ 1984.087579] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1984.101425] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1984.533785] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1984.534125] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Creating directory with path [datastore2] vmware_temp/f4b1b70c-4e57-4a79-8836-f5d2e4ce3778/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1984.534251] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dae3658-c3e0-4e40-82ff-a389ba2d8eaf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.545498] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Created directory with path [datastore2] vmware_temp/f4b1b70c-4e57-4a79-8836-f5d2e4ce3778/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1984.545684] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Fetch image to [datastore2] vmware_temp/f4b1b70c-4e57-4a79-8836-f5d2e4ce3778/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1984.545856] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/f4b1b70c-4e57-4a79-8836-f5d2e4ce3778/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1984.546581] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42c3924-bbd6-4815-85e5-75d7fd5d9bed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.552785] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d76f5f2-59f3-4860-8ab5-b83436228f56 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.561605] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d2d59b-3fd6-44af-b457-e69454e4baac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.594901] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acae566-8aba-418a-92d6-c9f91bced596 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.601385] env[67424]: DEBUG oslo_vmware.api [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Task: {'id': task-3200082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064831} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1984.602848] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1984.602988] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1984.603185] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1984.603362] env[67424]: INFO nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 1984.605118] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aa116628-be8e-4976-b5ff-1a81c7c255a3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.607026] env[67424]: DEBUG nova.compute.claims [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1984.607155] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1984.607370] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1984.632992] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1984.773761] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1984.775354] env[67424]: ERROR nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last):
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = getattr(controller, method)(*args, **kwargs)
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._get(image_id)
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1984.775354] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] resp, body = self.http_client.get(url, headers=header)
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.request(url, 'GET', **kwargs)
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._handle_response(resp)
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise exc.from_response(resp, resp.content)
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5]
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] During handling of the above exception, another exception occurred:
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5]
[ 1984.775909] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last):
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] yield resources
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self.driver.spawn(context, instance, image_meta,
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._fetch_image_if_missing(context, vi)
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image_fetch(context, vi, tmp_image_ds_loc)
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] images.fetch_image(
[ 1984.776307] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] metadata = IMAGE_API.get(context, image_ref)
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return session.show(context, image_id,
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] _reraise_translated_image_exception(image_id)
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise new_exc.with_traceback(exc_trace)
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = getattr(controller, method)(*args, **kwargs)
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1984.776684] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._get(image_id)
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] resp, body = self.http_client.get(url, headers=header)
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.request(url, 'GET', **kwargs)
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._handle_response(resp)
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise exc.from_response(resp, resp.content)
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] nova.exception.ImageNotAuthorized: Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.
[ 1984.777162] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5]
[ 1984.777662] env[67424]: INFO nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Terminating instance
[ 1984.777662] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1984.777662] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1984.777974] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1984.778207] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1984.778314] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1984.779239] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58fe6bad-59f8-4f0d-9186-fe9ff87d2d8d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.783256] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf98352-da93-4231-947f-8f5bca7410b2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.794209] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba5707-edae-4a5f-ae4c-98055753ba3f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.797154] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1984.797332] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1984.798459] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b99b25cd-2d26-4839-9515-5dfddfa606af {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.829544] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1984.832713] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae08a3ea-f433-46f3-866a-3f2366fa914d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.835118] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for the task: (returnval){
[ 1984.835118] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]5254d27e-40f3-87cd-9bd4-419b0352d249"
[ 1984.835118] env[67424]: _type = "Task"
[ 1984.835118] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1984.841365] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbf27c7-7a45-40a4-aa3a-009670d314c3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.850019] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1984.850255] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Creating directory with path [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1984.850467] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b18c82c-d109-48d7-9fb4-288e0a8e42d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.859692] env[67424]: DEBUG nova.compute.provider_tree [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1984.868161] env[67424]: DEBUG nova.scheduler.client.report [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1984.878600] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Created directory with path [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1984.878796] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Fetch image to [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1984.878966] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1984.879733] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1275e4e-dcd8-4af8-a0bd-4b14b7ba29d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.883933] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.276s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1984.884443] env[67424]: ERROR nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1984.884443] env[67424]: Faults: ['InvalidArgument']
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Traceback (most recent call last):
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self.driver.spawn(context, instance, image_meta,
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self._fetch_image_if_missing(context, vi)
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] image_cache(vi, tmp_image_ds_loc)
[ 1984.884443] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] vm_util.copy_virtual_disk(
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] session._wait_for_task(vmdk_copy_task)
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return self.wait_for_task(task_ref)
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return evt.wait()
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] result = hub.switch()
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] return self.greenlet.switch()
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1984.884877] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] self.f(*self.args, **self.kw)
[ 1984.885230] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1984.885230] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] raise exceptions.translate_fault(task_info.error)
[ 1984.885230] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1984.885230] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Faults: ['InvalidArgument']
[ 1984.885230] env[67424]: ERROR nova.compute.manager [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a]
[ 1984.885230] env[67424]: DEBUG nova.compute.utils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1984.888852] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Build of instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a was re-scheduled: A specified parameter was not correct: fileType
[ 1984.888852] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1984.889256] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1984.889427] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1984.889641] env[67424]: DEBUG nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1984.889759] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1984.891726] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e50f1c7-3a58-4f5e-8f79-9b55788cc672 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.901318] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f718f1c9-ce61-4b1c-a690-bc5fa96500dd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.905430] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1984.935303] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Releasing lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1984.935698] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1984.935888] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1984.936793] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b28a57d-fde0-4762-9683-88a0abef1109 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.940259] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23792d90-8502-44f5-b699-2cf3a99b2ae3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.948037] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b11c54dc-bf7d-49a4-a3fa-75767337066a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.949899] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1984.950128] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01207553-1c6c-41ee-a5a2-6d7e9413cef8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.969648] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1984.977844] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1984.978090] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1984.978274] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Deleting the datastore file [datastore2] afd0f239-0752-4e2e-a232-9f22722753f5 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1984.978524] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc6595d2-be7a-4678-8541-011a4fa6531f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1984.984345] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for the task: (returnval){
[ 1984.984345] env[67424]: value = "task-3200084"
[ 1984.984345] env[67424]: _type = "Task"
[ 1984.984345] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1984.992904] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Task: {'id': task-3200084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1985.026555] env[67424]: DEBUG oslo_vmware.rw_handles [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1985.088885] env[67424]: DEBUG oslo_vmware.rw_handles [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1985.089111] env[67424]: DEBUG oslo_vmware.rw_handles [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1985.243891] env[67424]: DEBUG nova.network.neutron [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1985.256534] env[67424]: INFO nova.compute.manager [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Took 0.37 seconds to deallocate network for instance.
[ 1985.352407] env[67424]: INFO nova.scheduler.client.report [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Deleted allocations for instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a [ 1985.379630] env[67424]: DEBUG oslo_concurrency.lockutils [None req-e87bd998-5df8-42ac-a51b-2b6830a82b51 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 686.523s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.380162] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 490.203s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.380256] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.380416] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.380598] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.382699] env[67424]: INFO nova.compute.manager [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Terminating instance [ 1985.384367] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquiring lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.384537] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Acquired lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.384693] env[67424]: DEBUG nova.network.neutron [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1985.415282] env[67424]: DEBUG nova.network.neutron [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1985.494567] env[67424]: DEBUG oslo_vmware.api [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Task: {'id': task-3200084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030079} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.497076] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.497076] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1985.497076] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1985.497076] env[67424]: INFO nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1985.497076] env[67424]: DEBUG oslo.service.loopingcall [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.497344] env[67424]: DEBUG nova.compute.manager [-] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1985.498453] env[67424]: DEBUG nova.compute.claims [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1985.498453] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.498601] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.581331] env[67424]: DEBUG nova.network.neutron [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.590869] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Releasing lock "refresh_cache-f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.591325] env[67424]: DEBUG nova.compute.manager [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1985.591519] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1985.592052] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58b625b2-9d02-4bf8-b9b1-e1091c8232b1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.603077] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b47c0f-aa96-4688-a361-3669f3f72046 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.634172] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f98aafb6-c8e3-44fd-b942-06e4b6bbc52a could not be found. [ 1985.634373] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1985.634547] env[67424]: INFO nova.compute.manager [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1985.634780] env[67424]: DEBUG oslo.service.loopingcall [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.636883] env[67424]: DEBUG nova.compute.manager [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1985.636985] env[67424]: DEBUG nova.network.neutron [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1985.653836] env[67424]: DEBUG nova.network.neutron [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1985.658951] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93828625-a911-463c-87e0-92a85db34594 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.661903] env[67424]: DEBUG nova.network.neutron [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.667405] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ae98a1-1005-4e8c-a7e3-344154a3258b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.670899] env[67424]: INFO nova.compute.manager [-] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] Took 0.03 seconds to deallocate network for instance. [ 1985.698930] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ced46a-68d2-4e38-88ff-f5d3b2f9b6e6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.708576] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c937a3d-0f35-4f0d-b8b3-334525bad2ed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.724907] env[67424]: DEBUG nova.compute.provider_tree [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.733450] env[67424]: DEBUG nova.scheduler.client.report [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1985.748478] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.250s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.749156] env[67424]: ERROR nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff. 
[ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last): [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = getattr(controller, method)(*args, **kwargs) [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._get(image_id) [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1985.749156] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] resp, body = self.http_client.get(url, headers=header) [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.request(url, 'GET', **kwargs) [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._handle_response(resp) [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise exc.from_response(resp, resp.content) [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] During handling of the above exception, another exception occurred: [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1985.749531] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last): [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self.driver.spawn(context, instance, image_meta, [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._fetch_image_if_missing(context, vi) [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image_fetch(context, vi, tmp_image_ds_loc) [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] images.fetch_image( [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] metadata = IMAGE_API.get(context, image_ref) [ 1985.749870] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return session.show(context, image_id, [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] _reraise_translated_image_exception(image_id) [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise new_exc.with_traceback(exc_trace) [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: 
afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = getattr(controller, method)(*args, **kwargs) [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._get(image_id) [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1985.750248] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] resp, body = self.http_client.get(url, headers=header) [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.request(url, 'GET', **kwargs) [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self._handle_response(resp) [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise exc.from_response(resp, resp.content) [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] nova.exception.ImageNotAuthorized: Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff. [ 1985.750662] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1985.750662] env[67424]: DEBUG nova.compute.utils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff. 
{{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1985.751476] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Build of instance afd0f239-0752-4e2e-a232-9f22722753f5 was re-scheduled: Not authorized for image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff. {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1985.751917] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1985.752169] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.752320] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.752486] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1985.766750] env[67424]: DEBUG oslo_concurrency.lockutils [None req-29f17dd4-5a8a-4c7a-ba3b-1c0f344c4ad8 tempest-AttachVolumeTestJSON-66883650 tempest-AttachVolumeTestJSON-66883650-project-member] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.387s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.767555] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 139.022s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.767740] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: f98aafb6-c8e3-44fd-b942-06e4b6bbc52a] During sync_power_state the instance has a pending task (deleting). Skip. 
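
[Annotation, not part of the captured log] The re-schedule above follows directly from the ImageNotAuthorized traceback: nova/image/glance.py (show, line 285) catches the glanceclient HTTPUnauthorized, and _reraise_translated_image_exception (line 1032) re-raises it as a Nova exception on the original traceback, which is why the log shows HTTPUnauthorized chained into ImageNotAuthorized for the same image. A minimal, runnable sketch of that translation pattern, using stand-in exception classes rather than the real nova/glanceclient imports:

import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized (illustrative)."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized (illustrative)."""

def _client_call(image_id):
    # Stand-in for the glanceclient GET that returned HTTP 401 above.
    raise HTTPUnauthorized('HTTP 401 Unauthorized')

def _reraise_translated_image_exception(image_id):
    # Translate the client error but keep the original traceback, so the
    # log records the full glanceclient call chain under the Nova exception.
    exc_type, exc_value, exc_trace = sys.exc_info()
    if isinstance(exc_value, HTTPUnauthorized):
        new_exc = ImageNotAuthorized('Not authorized for image %s.' % image_id)
        raise new_exc.with_traceback(exc_trace)
    raise

def show(image_id):
    try:
        return _client_call(image_id)
    except Exception:
        _reraise_translated_image_exception(image_id)

Calling show('0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff') raises ImageNotAuthorized whose traceback still reaches into _client_call, matching the shape of the chained traceback above.
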
[ 1985.767909] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "f98aafb6-c8e3-44fd-b942-06e4b6bbc52a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.777598] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1985.836430] env[67424]: DEBUG nova.network.neutron [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.845352] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Releasing lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.845572] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1985.845754] env[67424]: DEBUG nova.compute.manager [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1985.933207] env[67424]: INFO nova.scheduler.client.report [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Deleted allocations for instance afd0f239-0752-4e2e-a232-9f22722753f5 [ 1985.952077] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f9110eba-a1f6-4f0d-b5a3-330019d14135 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 628.933s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.952379] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 432.836s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.952601] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "afd0f239-0752-4e2e-a232-9f22722753f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.952806] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.952972] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.954864] env[67424]: INFO nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Terminating instance [ 1985.956906] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquiring lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.957077] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Acquired lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" 
{{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.957247] env[67424]: DEBUG nova.network.neutron [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1985.982555] env[67424]: DEBUG nova.network.neutron [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance cache missing network info. {{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1986.044315] env[67424]: DEBUG nova.network.neutron [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.053065] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Releasing lock "refresh_cache-afd0f239-0752-4e2e-a232-9f22722753f5" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.053475] env[67424]: DEBUG nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1986.053665] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1986.054187] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc6d4722-c006-414c-8104-4680fa84ab43 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.063656] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd6ecf2-dcc0-4cf8-945a-495e81216534 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.090207] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance afd0f239-0752-4e2e-a232-9f22722753f5 could not be found. 
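
[Annotation, not part of the captured log] The same tolerant destroy pattern appears for both instances above: the driver resolves the VM with SearchIndex.FindAllByUuid, and when the lookup fails it logs the "Instance does not exist on backend" WARNING, still reports "Instance destroyed", and lets termination continue to network deallocation. A sketch of that flow under assumed helper names (search_vm_by_uuid and unregister_and_delete are hypothetical, not the actual vmops internals):

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound (illustrative)."""

def search_vm_by_uuid(session, uuid):
    # Hypothetical lookup via SearchIndex.FindAllByUuid; here it always
    # reports the VM as missing, as in the log above.
    raise InstanceNotFound('Instance %s could not be found.' % uuid)

def unregister_and_delete(session, vm_ref):
    # Hypothetical teardown of the backend VM and its datastore files.
    pass

def destroy(session, instance_uuid):
    # A missing backend VM is not fatal: terminate must still release
    # the instance's network and resource allocations.
    try:
        vm_ref = search_vm_by_uuid(session, instance_uuid)
        unregister_and_delete(session, vm_ref)
    except InstanceNotFound as exc:
        LOG.warning('Instance does not exist on backend: %s', exc)
    LOG.debug('Instance destroyed')
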
[ 1986.090348] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1986.090521] env[67424]: INFO nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1986.090753] env[67424]: DEBUG oslo.service.loopingcall [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1986.090974] env[67424]: DEBUG nova.compute.manager [-] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1986.091109] env[67424]: DEBUG nova.network.neutron [-] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1986.183531] env[67424]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67424) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1986.183822] env[67424]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1986.184423] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-3f5fe84a-98f2-4bca-a03d-72a22437b691'] [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1986.184423] env[67424]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.184944] env[67424]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.184944] env[67424]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.185502] env[67424]: ERROR oslo.service.loopingcall [ 1986.185994] env[67424]: ERROR nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.216222] env[67424]: ERROR nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last): [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] exception_handler_v20(status_code, error_body) [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise client_exc(message=error_message, [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Neutron server returns request_ids: ['req-3f5fe84a-98f2-4bca-a03d-72a22437b691'] [ 1986.216222] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] During handling of the above exception, another exception occurred: [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Traceback (most recent call last): [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._delete_instance(context, instance, bdms) [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._shutdown_instance(context, instance, bdms) [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._try_deallocate_network(context, instance, requested_networks) [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] with excutils.save_and_reraise_exception(): [ 1986.216718] env[67424]: ERROR 
nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.216718] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self.force_reraise() [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise self.value [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] _deallocate_network_with_retries() [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return evt.wait() [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = hub.switch() [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.greenlet.switch() [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1986.217207] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = func(*self.args, **self.kw) [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] result = f(*args, **kwargs) [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._deallocate_network( [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self.network_api.deallocate_for_instance( [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: 
afd0f239-0752-4e2e-a232-9f22722753f5] data = neutron.list_ports(**search_opts) [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.list('ports', self.ports_path, retrieve_all, [ 1986.217607] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] for r in self._pagination(collection, path, **params): [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] res = self.get(path, params=params) [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.retry_request("GET", action, body=body, [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.218025] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] return self.do_request(method, action, body=body, [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] ret = obj(*args, **kwargs) [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] self._handle_fault_response(status_code, replybody, resp) [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.218428] env[67424]: ERROR nova.compute.manager [instance: afd0f239-0752-4e2e-a232-9f22722753f5] [ 1986.243690] env[67424]: DEBUG oslo_concurrency.lockutils [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.291s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.244819] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 139.499s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.244997] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1986.245186] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "afd0f239-0752-4e2e-a232-9f22722753f5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.287418] env[67424]: INFO nova.compute.manager [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] [instance: afd0f239-0752-4e2e-a232-9f22722753f5] Successfully reverted task state from None on failure for instance. [ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server [None req-4e5b9681-b5f8-4135-9afb-8c8c18b6c340 tempest-ServerShowV254Test-597616216 tempest-ServerShowV254Test-597616216-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-3f5fe84a-98f2-4bca-a03d-72a22437b691']
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1986.290593] env[67424]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server raise self.value
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1986.291249] env[67424]: ERROR oslo_messaging.rpc.server raise self.value
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server raise self.value
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance
[ 1986.291821] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server raise self.value
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server raise self.value
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 1986.292386] env[67424]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.292934] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1986.293508] env[67424]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1986.294069] env[67424]: ERROR oslo_messaging.rpc.server
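The two chained tracebacks above tell the whole story: while deallocating networks for a terminating instance, the Neutron client received a Keystone 401 (Unauthorized) on list_ports, and the wrapper at nova/network/neutron.py:212 in this build converted it into NeutronAdminCredentialConfigurationInvalid, pointing at the service credentials Nova uses for Neutron rather than at a transient network fault. A minimal sketch of that translation pattern; the decorator name here is hypothetical, while the exception classes are the ones visible in the traceback:

    import functools

    from neutronclient.common import exceptions as neutron_client_exc

    from nova import exception


    def translate_unauthorized(func):
        """Illustrative only: mirror the wrapper seen in the traceback,
        which re-raises neutronclient Unauthorized as a Nova config error."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_client_exc.Unauthorized:
                # The service credentials (or their token) were rejected
                # by Keystone; surface this as a deployment problem.
                raise exception.NeutronAdminCredentialConfigurationInvalid()
        return wrapper

Because the translated exception propagates out of _try_deallocate_network, the RPC server logs the full chained traceback and the delete request fails rather than silently leaking ports.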
[ 2001.387871] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2003.389642] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2003.389904] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 2005.384520] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2005.387145] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2006.387355] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2007.388233] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2007.388233] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2008.389063] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2008.389063] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 2008.389063] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 2008.408421] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.408587] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.408700] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.408826] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.408948] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.409080] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.409198] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.409313] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2008.409456] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
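The burst of "Running periodic task" records above comes from oslo.service's periodic task framework: ComputeManager inherits from periodic_task.PeriodicTasks, each task is a decorated method, and a looping call drives them through run_periodic_tasks() (the periodic_task.py:210 frame stamped on every record). A minimal sketch of that wiring, with a hypothetical manager class and a placeholder task body:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF


    class Manager(periodic_task.PeriodicTasks):
        """Hypothetical stand-in for nova.compute.manager.ComputeManager."""

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # The real task refreshes one instance's network info cache
            # per run and skips instances that are still building, as the
            # records above show; here it is just a placeholder.
            pass


    mgr = Manager()
    # The service's timer loop calls this, producing one
    # "Running periodic task ..." DEBUG record per due task.
    mgr.run_periodic_tasks(context=None)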
[ 2008.409938] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2008.421075] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2008.421296] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2008.422089] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2008.422344] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2008.423470] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6430c9-d284-422b-a124-d4e7d76297ad {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.432241] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e059a31-79f7-4071-b180-795479f520cf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.446049] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34b9ff7-8997-49ed-8dbe-10f7e290b21c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.452223] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d400a2e7-abec-477e-b4d1-fba7308fd517 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.480249] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180979MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2008.480388] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2008.480571] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2008.546384] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.546539] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.546666] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.546787] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.546902] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.547029] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.547309] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.547309] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2008.547453] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2008.547564] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2008.563908] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 2008.577268] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 2008.577445] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 2008.588220] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 2008.605041] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 2008.698852] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab1f509-25b4-4d8c-bfa4-03044fb128d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.706236] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed9c2fb-a511-472b-a970-e5ee246e0967 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
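The inventory dictionaries the tracker reports to Placement above determine schedulable capacity: Placement treats effective capacity as (total - reserved) * allocation_ratio, capped per allocation by max_unit. With the numbers in this audit that gives 48 * 4.0 = 192 schedulable VCPUs (at most 16 per instance) and (196590 - 512) * 1.0 = 196078 MB of schedulable RAM. A small worked check using the exact data from the log:

    # Illustrative capacity math for the inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
                 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 126,
                    'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's effective-capacity formula.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={capacity:.0f}, max_unit={inv['max_unit']}")

    # VCPU: schedulable=192, max_unit=16
    # MEMORY_MB: schedulable=196078, max_unit=65530
    # DISK_GB: schedulable=400, max_unit=126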
[ 2008.736254] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292cfd58-b362-4658-b950-a5fad4cb8062 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.742862] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c83633-6d77-459e-9f0c-10a00a003775 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2008.755515] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2008.763898] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2008.777083] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2008.777268] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.297s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2034.062756] env[67424]: WARNING oslo_vmware.rw_handles [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles response.begin()
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2034.062756] env[67424]: ERROR oslo_vmware.rw_handles
[ 2034.063481] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2034.065546] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2034.065790] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Copying Virtual Disk [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/83cbabe8-f2f8-45f3-b875-8913a157cd2c/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2034.066078] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c16005ac-3520-42aa-9b4c-c190fdc2ac8c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.073739] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for the task: (returnval){
[ 2034.073739] env[67424]: value = "task-3200085"
[ 2034.073739] env[67424]: _type = "Task"
[ 2034.073739] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2034.081335] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Task: {'id': task-3200085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2034.584043] env[67424]: DEBUG oslo_vmware.exceptions [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2034.584331] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2034.584905] env[67424]: ERROR nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2034.584905] env[67424]: Faults: ['InvalidArgument']
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Traceback (most recent call last):
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] yield resources
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self.driver.spawn(context, instance, image_meta,
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self._fetch_image_if_missing(context, vi)
[ 2034.584905] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] image_cache(vi, tmp_image_ds_loc)
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] vm_util.copy_virtual_disk(
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] session._wait_for_task(vmdk_copy_task)
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return self.wait_for_task(task_ref)
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return evt.wait()
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] result = hub.switch()
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2034.585341] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return self.greenlet.switch()
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self.f(*self.args, **self.kw)
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] raise exceptions.translate_fault(task_info.error)
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Faults: ['InvalidArgument']
[ 2034.585700] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f]
[ 2034.585700] env[67424]: INFO nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Terminating instance
[ 2034.587216] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2034.587216] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2034.587372] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a62099c1-d6da-4455-9b5a-683e2b065d8d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.589567] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2034.589761] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2034.590497] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c7166b-ed85-48e5-be72-ea9482212a44 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.597511] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2034.597728] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d5ca9b5-e59f-4a7a-b27e-b95fe17e500e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.602312] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2034.602488] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2034.603172] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff016497-3e1f-405a-824f-1800d745c4e9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.607698] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){
[ 2034.607698] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52659293-9cf0-71c1-6f57-2849118cf0ac"
[ 2034.607698] env[67424]: _type = "Task"
[ 2034.607698] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2034.616684] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52659293-9cf0-71c1-6f57-2849118cf0ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2034.673935] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2034.674111] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2034.674222] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Deleting the datastore file [datastore2] 72d5f322-47e3-402e-abcc-1b5b0497bc1f {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2034.674495] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0859f78-d1bd-4ea9-a0b0-cddaf0975a63 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2034.680787] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for the task: (returnval){
[ 2034.680787] env[67424]: value = "task-3200087"
[ 2034.680787] env[67424]: _type = "Task"
[ 2034.680787] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2034.690182] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Task: {'id': task-3200087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
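The "Waiting for the task ... to complete" and "progress is 0%." records above show oslo.vmware's polling model: every vSphere operation returns a task object whose state is polled until success or error, and a task error is raised as a translated fault, which is how the earlier CopyVirtualDisk_Task surfaced as VimFaultException: "A specified parameter was not correct: fileType". A minimal sketch of that loop, assuming a hypothetical get_task_info() accessor in place of the real PropertyCollector plumbing:

    import time


    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""

        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list


    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info is a hypothetical callable returning an object with
        # .state ('queued'|'running'|'success'|'error') and .error; the real
        # driver reads TaskInfo through the vSphere PropertyCollector.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Mirrors the log: the task error becomes a fault such as
                # "A specified parameter was not correct: fileType".
                raise VimFaultException(
                    list(getattr(info.error, 'faults', [])), str(info.error))
            time.sleep(interval)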
[ 2035.121164] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2035.121545] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating directory with path [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2035.121827] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f11ac5aa-27e8-4930-ba70-d24456ee325b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.164221] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Created directory with path [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2035.164442] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Fetch image to [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2035.164611] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2035.165472] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f86005-0244-4645-bf05-6c8ce8607a9a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.172545] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88940f07-3062-4255-ae6c-a88a3061a1cb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.181617] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5a6f20-9c3e-45f0-aef3-66746fa853b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.216102] env[67424]: DEBUG oslo_vmware.api [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Task: {'id': task-3200087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192187} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2035.216577] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2035.216769] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2035.216940] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2035.217150] env[67424]: INFO nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Took 0.63 seconds to destroy the instance on the hypervisor.
[ 2035.219138] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a61002-6ceb-4c0e-9ab8-c748c768a802 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.221734] env[67424]: DEBUG nova.compute.claims [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2035.221920] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2035.222140] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2035.227911] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4efa64cc-9945-4c4d-8cf7-58aca11625f8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.249574] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2035.367464] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d5a22b-6677-4e01-91cb-8448ecb6bb9e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.375300] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522bdb08-2c11-42fa-9402-65bbddce450c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.410716] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425f148a-f260-4f08-a253-45eef6cc8cdd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.417797] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1c956b-938c-49d7-a2b8-6428ea1718cf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2035.430700] env[67424]: DEBUG nova.compute.provider_tree [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2035.433237] env[67424]: DEBUG oslo_vmware.rw_handles [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2035.490733] env[67424]: DEBUG nova.scheduler.client.report [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2035.495636] env[67424]: DEBUG oslo_vmware.rw_handles [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
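The rw_handles records above are the image transfer itself: Nova streams the Glance image (21318656 bytes here) to the ESX host with an HTTP PUT against the datastore's /folder URL, which is also where the earlier RemoteDisconnected warning came from when the remote end dropped the connection. A generic illustration of that kind of streaming upload, explicitly not oslo.vmware's actual API; the URL is truncated and the cookie handling is a simplified placeholder:

    # Generic sketch of streaming data to a vSphere datastore /folder URL,
    # roughly the transfer oslo_vmware.rw_handles wraps for Nova.
    import requests

    URL = ("https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/"
           "vmware_temp/.../tmp-sparse.vmdk"
           "?dcPath=ha-datacenter&dsName=datastore2")


    def upload(image_iter, size, cookies):
        headers = {"Content-Type": "application/octet-stream",
                   "Content-Length": str(size)}
        # requests accepts a generator as the body and streams it chunk by
        # chunk; a dropped connection here is what surfaces in the log as
        # http.client.RemoteDisconnected.
        resp = requests.put(URL, data=image_iter, headers=headers,
                            cookies=cookies)
        resp.raise_for_status()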
{{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2035.495804] env[67424]: DEBUG oslo_vmware.rw_handles [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2035.506799] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.285s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.507335] env[67424]: ERROR nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2035.507335] env[67424]: Faults: ['InvalidArgument'] [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Traceback (most recent call last): [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self.driver.spawn(context, instance, image_meta, [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self._fetch_image_if_missing(context, vi) [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] image_cache(vi, tmp_image_ds_loc) [ 2035.507335] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] vm_util.copy_virtual_disk( [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2035.507845] env[67424]: ERROR 
nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] session._wait_for_task(vmdk_copy_task) [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return self.wait_for_task(task_ref) [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return evt.wait() [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] result = hub.switch() [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] return self.greenlet.switch() [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2035.507845] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] self.f(*self.args, **self.kw) [ 2035.508274] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2035.508274] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] raise exceptions.translate_fault(task_info.error) [ 2035.508274] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2035.508274] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Faults: ['InvalidArgument'] [ 2035.508274] env[67424]: ERROR nova.compute.manager [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] [ 2035.508274] env[67424]: DEBUG nova.compute.utils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2035.509465] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Build of instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f was re-scheduled: A specified parameter was not correct: fileType [ 2035.509465] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2035.509836] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 
tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2035.510092] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2035.510287] env[67424]: DEBUG nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2035.510453] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2035.898755] env[67424]: DEBUG nova.network.neutron [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.910765] env[67424]: INFO nova.compute.manager [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Took 0.40 seconds to deallocate network for instance. 
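[annotation] The traceback above ends where oslo.vmware's poller raises exceptions.translate_fault(task_info.error): the CopyVirtualDisk_Task failed on the vCenter side, and the VIM fault class ('InvalidArgument', complaining about the fileType parameter) resurfaces in the compute service as a VimFaultException. A hedged sketch of that mapping with illustrative structures (the real TaskInfo.error is a SOAP object, not a dict):

class VimFaultException(Exception):
    """Carries the VIM fault class names alongside the localized message."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def translate_fault(error):
    # 'error' mimics TaskInfo.error: a localized message plus the names
    # of the underlying VIM fault classes.
    return VimFaultException(error['faults'], error['message'])

exc = translate_fault({
    'faults': ['InvalidArgument'],
    'message': 'A specified parameter was not correct: fileType',
})
print(exc)             # A specified parameter was not correct: fileType
print(exc.fault_list)  # ['InvalidArgument']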
[ 2036.027332] env[67424]: INFO nova.scheduler.client.report [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Deleted allocations for instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f [ 2036.059141] env[67424]: DEBUG oslo_concurrency.lockutils [None req-488d5522-3e3e-4f97-be72-3b8786fb01f5 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.059430] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 383.155s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.059653] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Acquiring lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.059862] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.060116] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.062118] env[67424]: INFO nova.compute.manager [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Terminating instance [ 2036.065407] env[67424]: DEBUG nova.compute.manager [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2036.065606] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2036.065865] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b549b86a-bc86-40b0-9ed6-2c184a91c29a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.074970] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb64bf76-8124-44eb-80ce-1ac1c2959607 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.103259] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72d5f322-47e3-402e-abcc-1b5b0497bc1f could not be found. [ 2036.103485] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2036.103662] env[67424]: INFO nova.compute.manager [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2036.103905] env[67424]: DEBUG oslo.service.loopingcall [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.104130] env[67424]: DEBUG nova.compute.manager [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2036.104229] env[67424]: DEBUG nova.network.neutron [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2036.129796] env[67424]: DEBUG nova.network.neutron [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.138073] env[67424]: INFO nova.compute.manager [-] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] Took 0.03 seconds to deallocate network for instance. 
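[annotation] The lockutils lines above make the serialization visible: the build held the per-instance lock for 579.001s, so the terminate request queued behind it waited 383.155s before do_terminate_instance could run. A small sketch, with assumed names, of how such waited/held durations can be measured around a named lock:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_from
            print(f'Lock "{name}" "released" :: held {held:.3f}s')

with timed_lock("72d5f322-47e3-402e-abcc-1b5b0497bc1f"):
    pass  # build or terminate work would run here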
[ 2036.247311] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2a470ece-2c73-4799-9450-0c48d7f07378 tempest-ServerAddressesNegativeTestJSON-1984731396 tempest-ServerAddressesNegativeTestJSON-1984731396-project-member] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.248222] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 189.502s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.248449] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 72d5f322-47e3-402e-abcc-1b5b0497bc1f] During sync_power_state the instance has a pending task (deleting). Skip. [ 2036.248674] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "72d5f322-47e3-402e-abcc-1b5b0497bc1f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.687018] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "461f92ee-b076-4cb7-8170-66cddb898b99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.756823] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.383647] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.387282] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.387473] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.387625] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
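[annotation] Two periodic-task details above are configuration-driven rather than errors: _sync_power_states skips the instance because it has a pending deleting task, and _reclaim_queued_deletes returns immediately because reclaim_instance_interval is not positive, i.e. soft-delete reclaim is disabled (the default). A sketch of the latter gate, with a plain variable standing in for CONF:

reclaim_instance_interval = 0  # stand-in for CONF.reclaim_instance_interval

def reclaim_queued_deletes():
    # With a non-positive interval there is nothing to reclaim; the task
    # still fires on its schedule but exits before touching any instances.
    if reclaim_instance_interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # ...otherwise: find SOFT_DELETED instances older than the interval
    # and hard-delete them.

reclaim_queued_deletes()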
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2065.666179] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.666410] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.676594] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Starting instance... {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2065.739508] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.739801] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.741320] env[67424]: INFO nova.compute.claims [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2065.873908] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa738e09-c3af-4699-be6a-cb7616b78276 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.882987] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec53c52-7389-455a-b2cf-8e553e8860aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.913449] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616e6462-38a7-492e-9798-3928b17b62aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.920686] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df72ba5-d3e0-4f79-8fed-46774abbe8b2 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.933899] env[67424]: DEBUG nova.compute.provider_tree [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2065.943770] env[67424]: DEBUG nova.scheduler.client.report [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2065.959039] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.219s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.959495] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2065.991203] env[67424]: DEBUG nova.compute.utils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2065.992563] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2065.992730] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2066.001556] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Start building block device mappings for instance. 
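[annotation] The inventory dict that placement keeps reporting as unchanged encodes this node's schedulable capacity. Usable capacity per resource class is (total - reserved) * allocation_ratio, and an individual claim must also respect min_unit, max_unit, and step_size; a worked check against the logged values:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def capacity(inv):
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

def claim_fits(inv, amount):
    return (inv['min_unit'] <= amount <= inv['max_unit']
            and amount % inv['step_size'] == 0)

print(capacity(inventory['VCPU']))              # 192.0 schedulable vCPUs
print(capacity(inventory['MEMORY_MB']))         # 196078.0 MB
print(claim_fits(inventory['MEMORY_MB'], 128))  # True for an m1.nano claim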
{{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2066.052219] env[67424]: DEBUG nova.policy [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ac8098a2a904b4292a23bc38e8be219', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc66a2bf57d34e309f0f21a60c224076', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 2066.065793] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Start spawning the instance on the hypervisor. {{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2066.092594] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2066.094319] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2066.094319] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2066.094319] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2066.094319] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2066.094319] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2066.094613] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2066.094613] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2066.094613] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2066.094613] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2066.094821] env[67424]: DEBUG nova.virt.hardware [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2066.095704] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418ba2a7-4857-4d0f-bf22-7f4e24bb58c2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.103773] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a40651-0fb0-48a6-9325-633d4fc403a6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.361899] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Successfully created port: 0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2066.870094] env[67424]: DEBUG nova.compute.manager [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Received event network-vif-plugged-0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2066.870403] env[67424]: DEBUG oslo_concurrency.lockutils [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] Acquiring lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner 
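[annotation] The hardware.py walk above is deterministic for the m1.nano flavor: with no flavor or image topology constraints the limits default to 65536 sockets/cores/threads, and for 1 vCPU the only factorization is 1:1:1. A sketch of the enumeration (small maxima here purely to keep the search cheap):

from itertools import product

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Every (sockets, cores, threads) triple whose product equals the
    # vCPU count and which stays within the per-dimension maxima.
    return [(s, c, t)
            for s, c, t in product(range(1, max_sockets + 1),
                                   range(1, max_cores + 1),
                                   range(1, max_threads + 1))
            if s * c * t == vcpus]

print(possible_topologies(1, 4, 4, 4))      # [(1, 1, 1)] -- the single topology logged
print(possible_topologies(4, 4, 4, 4)[:3])  # several candidates once vcpus > 1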
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.870553] env[67424]: DEBUG oslo_concurrency.lockutils [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.870731] env[67424]: DEBUG oslo_concurrency.lockutils [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.870919] env[67424]: DEBUG nova.compute.manager [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] No waiting events found dispatching network-vif-plugged-0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2066.871129] env[67424]: WARNING nova.compute.manager [req-125ccd4f-eb6d-4995-94bd-fc581793d097 req-9c2e6179-cc50-4290-b45c-2d79e3adb21b service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Received unexpected event network-vif-plugged-0e4c448c-dcdc-4d06-9805-bad9cd8ae913 for instance with vm_state building and task_state spawning. [ 2066.948062] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Successfully updated port: 0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2066.957852] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.958105] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.958212] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2067.003841] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance cache missing network info. 
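[annotation] The WARNING above is the benign race the message describes: Neutron delivered network-vif-plugged before the driver registered a waiter for it, so pop_instance_event found no one to notify while the instance was still building/spawning. A sketch, with assumed names, of that waiter-registry handshake:

import threading

_waiters = {}  # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    _waiters[(instance_uuid, event_name)] = threading.Event()

def pop_instance_event(instance_uuid, event_name):
    waiter = _waiters.pop((instance_uuid, event_name), None)
    if waiter is None:
        print(f"Received unexpected event {event_name} "
              f"for instance {instance_uuid}")
    else:
        waiter.set()  # wake whoever is blocked waiting on this event

# The event arrives before anyone called prepare_for_event -> WARNING path:
pop_instance_event("55bf5d18-e16a-4fed-b11b-30ceb40c2d46",
                   "network-vif-plugged-0e4c448c-dcdc-4d06-9805-bad9cd8ae913")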
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2067.360435] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Updating instance_info_cache with network_info: [{"id": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "address": "fa:16:3e:dc:79:ea", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e4c448c-dc", "ovs_interfaceid": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.372946] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.373270] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance network_info: |[{"id": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "address": "fa:16:3e:dc:79:ea", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e4c448c-dc", "ovs_interfaceid": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2067.373658] 
env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:79:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e4c448c-dcdc-4d06-9805-bad9cd8ae913', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2067.381210] env[67424]: DEBUG oslo.service.loopingcall [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.381671] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2067.381891] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df2e3fe8-17e8-44c0-84eb-10dc204f7187 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.396884] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2067.403213] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2067.403213] env[67424]: value = "task-3200088" [ 2067.403213] env[67424]: _type = "Task" [ 2067.403213] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.410978] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200088, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.913568] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200088, 'name': CreateVM_Task, 'duration_secs': 0.375457} completed successfully. 
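[annotation] The "Instance VIF info" record above is a direct projection of the neutron port held in the network_info cache: the bridge becomes network_name, the port's nsx-logical-switch-id becomes an OpaqueNetwork reference, and the port id rides along as iface_id. A hedged sketch of that projection (the helper name is illustrative):

def vif_info_from_port(vif):
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],   # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',                     # from the image metadata
    }

port = {'id': '0e4c448c-dcdc-4d06-9805-bad9cd8ae913',
        'address': 'fa:16:3e:dc:79:ea',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id':
                    '6ba56987-9dc3-4c76-a4e2-942b05355bdb'}}
print(vif_info_from_port(port))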
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.913954] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2067.914409] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.914574] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.914879] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2067.915135] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee96032-0696-4133-850b-5dc484086866 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.919303] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2067.919303] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b8083c-0c79-2009-2a4d-1f1ffb19ab21" [ 2067.919303] env[67424]: _type = "Task" [ 2067.919303] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.926767] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b8083c-0c79-2009-2a4d-1f1ffb19ab21, 'name': SearchDatastore_Task} progress is 0%. 
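[annotation] The SearchDatastore_Task wait above follows the same poll loop every VMware task in this log goes through; CreateVM_Task task-3200088 just completed the same way (progress 0% -> completed successfully). A minimal stand-in, assuming dict-shaped task info rather than the real TaskInfo object:

import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info['error'])
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)

# Replay of the CreateVM_Task lifecycle seen above:
states = iter([
    {'id': 'task-3200088', 'state': 'running', 'progress': 0},
    {'id': 'task-3200088', 'state': 'success', 'duration_secs': 0.375457},
])
print(wait_for_task(lambda: next(states), interval=0))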
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.387893] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2068.388157] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2068.388328] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2068.402377] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.402596] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.402873] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.402911] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2068.404151] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e58f14d-b813-47f8-95e1-4e9bf342c21a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.413510] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38edee4d-6043-4047-964c-2a32634f84bc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.430922] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6795c1cc-f92c-49e8-968b-8a9e7fe6d0a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.440352] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.440573] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2068.440779] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.441788] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f813c1-aa14-4667-ad7f-e708fc4a6424 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.471679] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181000MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2068.471826] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.472028] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.538334] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.538515] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.538611] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.538730] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.538846] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.538958] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.539083] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.539198] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
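[annotation] The eight per-instance records above feed the totals in the "Final resource view" that follows: each instance carries the m1.nano allocation of {DISK_GB: 1, MEMORY_MB: 128, VCPU: 1}, and the host additionally reserves 512 MB, which is exactly where used_ram=1536MB, used_disk=8GB, and used_vcpus=8 come from:

instances = 8                       # the eight allocations listed above
per_instance = {'MEMORY_MB': 128, 'VCPU': 1, 'DISK_GB': 1}
reserved_mb = 512                   # host reservation from the inventory data

used_ram = instances * per_instance['MEMORY_MB'] + reserved_mb
used_vcpus = instances * per_instance['VCPU']
used_disk = instances * per_instance['DISK_GB']
print(used_ram, used_vcpus, used_disk)  # 1536 8 8 -- matches the final view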
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2068.539552] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2068.539552] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2068.631610] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c218398-3afd-40b3-be48-c41560211a10 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.639196] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c243951c-2eec-4201-a78a-0d3510abcf44 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.668321] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7054e0-5f6f-4bbf-8d42-42002f49a4c5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.674997] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ff188c-ca1d-45cb-afe0-45a3333b8d4f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.687678] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.697512] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2068.709747] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2068.709917] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.238s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.895541] env[67424]: DEBUG nova.compute.manager 
[req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Received event network-changed-0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2068.895749] env[67424]: DEBUG nova.compute.manager [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Refreshing instance network info cache due to event network-changed-0e4c448c-dcdc-4d06-9805-bad9cd8ae913. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2068.895951] env[67424]: DEBUG oslo_concurrency.lockutils [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] Acquiring lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.896106] env[67424]: DEBUG oslo_concurrency.lockutils [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] Acquired lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.896267] env[67424]: DEBUG nova.network.neutron [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Refreshing network info cache for port 0e4c448c-dcdc-4d06-9805-bad9cd8ae913 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2069.139646] env[67424]: DEBUG nova.network.neutron [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Updated VIF entry in instance network info cache for port 0e4c448c-dcdc-4d06-9805-bad9cd8ae913. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2069.139993] env[67424]: DEBUG nova.network.neutron [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Updating instance_info_cache with network_info: [{"id": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "address": "fa:16:3e:dc:79:ea", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e4c448c-dc", "ovs_interfaceid": "0e4c448c-dcdc-4d06-9805-bad9cd8ae913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.149500] env[67424]: DEBUG oslo_concurrency.lockutils [req-c2cad096-906c-434f-9980-27b0fbab00ef req-93146d89-9f35-4aa1-a672-b9cdc993081c service nova] Releasing lock "refresh_cache-55bf5d18-e16a-4fed-b11b-30ceb40c2d46" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.710085] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.710260] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2069.710382] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2069.727413] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.727560] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.727689] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.727814] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.727934] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.728065] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.728189] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.728309] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2069.728428] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2075.858537] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.858846] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.870349] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2075.917426] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.917718] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.919215] env[67424]: INFO nova.compute.claims [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2076.063200] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2fada7-76c6-4faa-bb83-f2e935699b8a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.070817] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94479aa-73d9-4420-b945-cb06da0fd76c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.100969] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b52fd7-c4ba-4fd2-a7b3-547645108724 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.108180] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba6ddc1-db6f-4f1b-bd8f-e50dc708bf20 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.121251] env[67424]: DEBUG nova.compute.provider_tree [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.129703] env[67424]: DEBUG nova.scheduler.client.report [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2076.147926] env[67424]: DEBUG 
oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.230s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.148407] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2076.182788] env[67424]: DEBUG nova.compute.utils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2076.184158] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2076.184330] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2076.192008] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2076.237460] env[67424]: DEBUG nova.policy [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b65c4e3396554092b2d2227443e1566e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1175d0fb2c454022bcc36081c9df063d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 2076.253011] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2076.278315] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2076.278552] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2076.278707] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2076.278888] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2076.279044] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2076.279251] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2076.279391] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2076.279612] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2076.279802] 
env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2076.279941] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2076.280164] env[67424]: DEBUG nova.virt.hardware [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2076.280989] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8251ae-9b22-473f-8594-68c9ad078b7e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.289265] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae790b3-779f-4726-b884-4688c822ca9a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.570429] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Successfully created port: c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2077.073344] env[67424]: DEBUG nova.compute.manager [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Received event network-vif-plugged-c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2077.073670] env[67424]: DEBUG oslo_concurrency.lockutils [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] Acquiring lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.073670] env[67424]: DEBUG oslo_concurrency.lockutils [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] Lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.073882] env[67424]: DEBUG oslo_concurrency.lockutils [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] Lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2077.073998] env[67424]: DEBUG nova.compute.manager [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] No waiting events found dispatching network-vif-plugged-c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2077.074186] env[67424]: WARNING nova.compute.manager [req-b6ab9aff-a3d9-4cc7-9fc1-9e71e320a6c3 req-48b2e106-09de-47f4-a9e1-df9284d34336 service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Received unexpected event network-vif-plugged-c40ae929-026b-4d2f-8eba-28696330bb3e for instance with vm_state building and task_state spawning. [ 2077.154567] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Successfully updated port: c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2077.170595] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.170737] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.170888] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2077.209494] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2077.367335] env[67424]: DEBUG nova.network.neutron [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Updating instance_info_cache with network_info: [{"id": "c40ae929-026b-4d2f-8eba-28696330bb3e", "address": "fa:16:3e:7e:bc:19", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc40ae929-02", "ovs_interfaceid": "c40ae929-026b-4d2f-8eba-28696330bb3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.380668] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.381034] env[67424]: DEBUG nova.compute.manager [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Instance network_info: |[{"id": "c40ae929-026b-4d2f-8eba-28696330bb3e", "address": "fa:16:3e:7e:bc:19", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc40ae929-02", "ovs_interfaceid": "c40ae929-026b-4d2f-8eba-28696330bb3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2077.381455] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:bc:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c40ae929-026b-4d2f-8eba-28696330bb3e', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.389287] env[67424]: DEBUG oslo.service.loopingcall [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.389744] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2077.389980] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c62ea27f-b270-4136-b37a-a07b26386bac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.410178] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.410178] env[67424]: value = "task-3200089" [ 2077.410178] env[67424]: _type = "Task" [ 2077.410178] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.417846] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200089, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.921461] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200089, 'name': CreateVM_Task, 'duration_secs': 0.283903} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.921640] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2077.922290] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.922453] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.922760] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.923014] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae5beb4-9f64-48b4-94d8-24624ec5ed65 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.927277] env[67424]: DEBUG oslo_vmware.api [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 2077.927277] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52da5169-7542-8cc2-f10d-5ca772815872" [ 2077.927277] env[67424]: _type = "Task" [ 2077.927277] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.934456] env[67424]: DEBUG oslo_vmware.api [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52da5169-7542-8cc2-f10d-5ca772815872, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.402601] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.436512] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.436750] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2078.436953] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.097759] env[67424]: DEBUG nova.compute.manager [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Received event network-changed-c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2079.097955] env[67424]: DEBUG nova.compute.manager [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Refreshing instance network info cache due to event network-changed-c40ae929-026b-4d2f-8eba-28696330bb3e. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2079.098181] env[67424]: DEBUG oslo_concurrency.lockutils [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] Acquiring lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.098332] env[67424]: DEBUG oslo_concurrency.lockutils [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] Acquired lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.098486] env[67424]: DEBUG nova.network.neutron [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Refreshing network info cache for port c40ae929-026b-4d2f-8eba-28696330bb3e {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2079.349728] env[67424]: DEBUG nova.network.neutron [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Updated VIF entry in instance network info cache for port c40ae929-026b-4d2f-8eba-28696330bb3e. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2079.350101] env[67424]: DEBUG nova.network.neutron [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Updating instance_info_cache with network_info: [{"id": "c40ae929-026b-4d2f-8eba-28696330bb3e", "address": "fa:16:3e:7e:bc:19", "network": {"id": "19090f64-a096-4eff-a564-2ae48403f80f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1127438712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1175d0fb2c454022bcc36081c9df063d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc40ae929-02", "ovs_interfaceid": "c40ae929-026b-4d2f-8eba-28696330bb3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.359137] env[67424]: DEBUG oslo_concurrency.lockutils [req-97e615c8-659f-430f-b2e5-bee40a3136cf req-9760d43c-aebc-4561-a5f3-e81ec78c8d7e service nova] Releasing lock "refresh_cache-35c3054d-517b-4ea4-acd5-7135c07e4e9a" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.419174] env[67424]: WARNING oslo_vmware.rw_handles [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Error 
occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2082.419174] env[67424]: ERROR oslo_vmware.rw_handles [ 2082.419839] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2082.421779] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2082.422103] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Copying Virtual Disk [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/8e2fef09-014b-4589-af32-919c5c4f4b25/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2082.422427] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab61f1b8-6357-4abd-8fbf-10e8851fa077 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.429495] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 2082.429495] env[67424]: value = "task-3200090" [ 2082.429495] env[67424]: _type = "Task" [ 2082.429495] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.439860] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.939403] env[67424]: DEBUG oslo_vmware.exceptions [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2082.939657] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.940236] env[67424]: ERROR nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2082.940236] env[67424]: Faults: ['InvalidArgument'] [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Traceback (most recent call last): [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] yield resources [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self.driver.spawn(context, instance, image_meta, [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self._fetch_image_if_missing(context, vi) [ 2082.940236] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] image_cache(vi, tmp_image_ds_loc) [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 
7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] vm_util.copy_virtual_disk( [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] session._wait_for_task(vmdk_copy_task) [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return self.wait_for_task(task_ref) [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return evt.wait() [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] result = hub.switch() [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2082.940694] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return self.greenlet.switch() [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self.f(*self.args, **self.kw) [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] raise exceptions.translate_fault(task_info.error) [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Faults: ['InvalidArgument'] [ 2082.941157] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] [ 2082.941157] env[67424]: INFO nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Terminating instance [ 2082.942130] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.942352] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2082.942595] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f226920-42a1-4e3b-bd89-4529b265544f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.945008] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2082.945220] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2082.945936] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff935ab-1c6e-4eb2-b581-a30999109232 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.952646] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2082.952898] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbcd6aa0-e5fb-40dd-bd15-a0559a0d1548 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.955007] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2082.955197] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2082.956200] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14399ba8-48c0-42fb-81ac-262a35ab8422 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.961082] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2082.961082] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528a3c68-2b3b-2daf-771f-ee579647d440" [ 2082.961082] env[67424]: _type = "Task" [ 2082.961082] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.967965] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]528a3c68-2b3b-2daf-771f-ee579647d440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.021231] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2083.021451] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2083.021628] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleting the datastore file [datastore2] 7e457262-ef1d-469e-8c36-b0f341a00e9a {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2083.021889] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e49c0d1-7595-4b22-810b-d10328b9b7dd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.027854] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for the task: (returnval){ [ 2083.027854] env[67424]: value = "task-3200092" [ 2083.027854] env[67424]: _type = "Task" [ 2083.027854] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.035016] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.472067] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2083.472481] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.472591] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-469c7ac5-0b20-47ec-b83d-a48b975fc1b6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.538760] env[67424]: DEBUG oslo_vmware.api [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Task: {'id': task-3200092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085917} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.539015] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2083.539220] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2083.539396] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2083.539566] env[67424]: INFO nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Took 0.59 seconds to destroy the instance on the hypervisor. 
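[editor's note] The entries above and below all follow the same oslo.vmware task life cycle: a request such as DeleteDatastoreFile_Task or CopyVirtualDisk_Task returns a task reference, nova waits on it via wait_for_task, _poll_task logs "progress is 0%." until the task finishes, and an error state is translated into an exception — the VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised at api.py line 448 in the traceback earlier in this section. The sketch below is a simplified, self-contained approximation of that polling loop, not the actual oslo_vmware.api implementation; FakeSession, FakeTaskInfo, and the local VimFaultException class are invented stand-ins so the sketch runs without a vCenter.

    # Illustrative sketch only -- approximates the poll loop visible in the
    # log (_poll_task / wait_for_task in oslo_vmware/api.py). All classes
    # here are hypothetical stand-ins, not the real oslo.vmware API.
    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FakeTaskInfo:
        def __init__(self, state, progress=0, error=None):
            self.state = state          # 'running' | 'success' | 'error'
            self.progress = progress
            self.error = error

    class FakeSession:
        """Returns a canned sequence of task states, as a vCenter would."""
        def __init__(self, infos):
            self._infos = iter(infos)

        def get_task_info(self, task_ref):
            return next(self._infos)

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll task_ref until it reports success or error (simplified)."""
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'running':
                # Corresponds to the "progress is 0%." DEBUG entries.
                print(f"Task: {task_ref} progress is {info.progress}%.")
                time.sleep(poll_interval)
            elif info.state == 'success':
                # Corresponds to "completed successfully." with duration_secs.
                print(f"Task: {task_ref} completed successfully.")
                return info
            else:
                # Mirrors: raise exceptions.translate_fault(task_info.error)
                raise VimFaultException(['InvalidArgument'], info.error)

    if __name__ == '__main__':
        ok = FakeSession([FakeTaskInfo('running', 0),
                          FakeTaskInfo('success', 100)])
        wait_for_task(ok, 'task-3200092')

Under these assumptions, a failed CopyVirtualDisk_Task would surface exactly where the log shows it: the poll loop raises, the spawn path aborts, and the compute manager proceeds to terminate the instance and release its resource claim, as the surrounding entries record.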
[ 2083.541718] env[67424]: DEBUG nova.compute.claims [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2083.541890] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.542127] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.596639] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2083.596639] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Fetch image to [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2083.596639] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2083.597521] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886f5e74-3445-4c31-8e08-d60ee24a6d7e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.605158] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd39b1a-1bbf-4e0d-90d4-8da878024733 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.616776] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213b2910-6723-46af-9f7a-00dd67459d5d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.649885] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c188a779-8432-4866-b21f-176a4a42c4f2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.656007] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-af501dec-893e-4688-954a-7e6c1d2bdb70 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.678244] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2083.736148] env[67424]: DEBUG oslo_vmware.rw_handles [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2083.738946] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682525bf-6041-442f-aa9f-5bbcdef71228 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.799076] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff9e0cf-c7f0-41bc-923b-c2a78f4060d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.804710] env[67424]: DEBUG oslo_vmware.rw_handles [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2083.804884] env[67424]: DEBUG oslo_vmware.rw_handles [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2083.831196] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcfeb0f-b5ea-447c-851f-25650cde81c2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.838340] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd20d4b-6de8-4838-927a-f078ee50e544 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.851061] env[67424]: DEBUG nova.compute.provider_tree [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2083.859794] env[67424]: DEBUG nova.scheduler.client.report [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2083.874468] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.874995] env[67424]: ERROR nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2083.874995] env[67424]: Faults: ['InvalidArgument'] [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Traceback (most recent call last): [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self.driver.spawn(context, instance, image_meta, [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 
7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self._fetch_image_if_missing(context, vi) [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] image_cache(vi, tmp_image_ds_loc) [ 2083.874995] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] vm_util.copy_virtual_disk( [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] session._wait_for_task(vmdk_copy_task) [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return self.wait_for_task(task_ref) [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return evt.wait() [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] result = hub.switch() [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] return self.greenlet.switch() [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2083.875379] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] self.f(*self.args, **self.kw) [ 2083.875747] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2083.875747] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] raise exceptions.translate_fault(task_info.error) [ 2083.875747] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2083.875747] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Faults: 
['InvalidArgument'] [ 2083.875747] env[67424]: ERROR nova.compute.manager [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] [ 2083.875747] env[67424]: DEBUG nova.compute.utils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2083.877497] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Build of instance 7e457262-ef1d-469e-8c36-b0f341a00e9a was re-scheduled: A specified parameter was not correct: fileType [ 2083.877497] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2083.877877] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2083.878062] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2083.878239] env[67424]: DEBUG nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2083.878403] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2084.176271] env[67424]: DEBUG nova.network.neutron [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2084.191539] env[67424]: INFO nova.compute.manager [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Took 0.31 seconds to deallocate network for instance. 
[ 2084.279616] env[67424]: INFO nova.scheduler.client.report [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Deleted allocations for instance 7e457262-ef1d-469e-8c36-b0f341a00e9a [ 2084.307801] env[67424]: DEBUG oslo_concurrency.lockutils [None req-78568fb9-4e6b-4028-a7e8-3abea00d222c tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 622.674s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.308062] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 425.980s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.308283] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Acquiring lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.308491] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.308655] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.310911] env[67424]: INFO nova.compute.manager [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Terminating instance [ 2084.313024] env[67424]: DEBUG nova.compute.manager [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Start destroying the instance on the hypervisor.
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2084.313024] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2084.313200] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4597811-2c9f-4a5e-bf63-2a50a56310cd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.322055] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ef7763-7838-4bfc-b323-319d0d0c14d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.349795] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7e457262-ef1d-469e-8c36-b0f341a00e9a could not be found. [ 2084.350044] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2084.350195] env[67424]: INFO nova.compute.manager [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2084.350436] env[67424]: DEBUG oslo.service.loopingcall [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2084.350638] env[67424]: DEBUG nova.compute.manager [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2084.350738] env[67424]: DEBUG nova.network.neutron [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2084.385132] env[67424]: DEBUG nova.network.neutron [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2084.392865] env[67424]: INFO nova.compute.manager [-] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] Took 0.04 seconds to deallocate network for instance.
[ 2084.496688] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a8db7160-dbe8-4991-87c5-e0f389fb151b tempest-MultipleCreateTestJSON-1179382693 tempest-MultipleCreateTestJSON-1179382693-project-member] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.189s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.497513] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 237.751s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.497701] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 7e457262-ef1d-469e-8c36-b0f341a00e9a] During sync_power_state the instance has a pending task (deleting). Skip. [ 2084.497871] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "7e457262-ef1d-469e-8c36-b0f341a00e9a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.739570] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "909d3788-23da-446f-9c47-46df54003e1c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.853204] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.853518] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.863909] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2093.910189] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.910432] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.911895] env[67424]: INFO nova.compute.claims [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2094.057124] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a50617-f62f-4c02-ac21-2179ec2bf1aa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.064419] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000f3b5b-2012-47fc-9c9b-6f891b6aa0fb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.094997] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e292732d-2b99-4b73-be67-1c71e4ee625d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.101464] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f973823-2500-4f91-9229-376ed1d40579 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.114240] env[67424]: DEBUG nova.compute.provider_tree [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2094.124335] env[67424]: DEBUG nova.scheduler.client.report [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2094.138316] env[67424]: DEBUG oslo_concurrency.lockutils [None 
req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.228s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.138834] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2094.176214] env[67424]: DEBUG nova.compute.utils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2094.177644] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2094.177728] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2094.188269] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2094.246171] env[67424]: DEBUG nova.policy [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92130ff41b244634a170d38f14a7248a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37696549d9e343acb36d6e89f75713d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 2094.258024] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2094.282767] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=<?>,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:15:51Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2094.282993] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2094.283161] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2094.283334] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2094.283472] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2094.283611] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2094.283813] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2094.283963] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2094.284371] env[67424]: DEBUG nova.virt.hardware [None
req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2094.284561] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2094.285015] env[67424]: DEBUG nova.virt.hardware [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2094.285606] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed0ec80-f6c2-4df7-bb42-d8af0d6ce9a0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.296682] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547c9c3a-7fbd-4efc-9c6d-fe1e380b7bdb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.592663] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Successfully created port: cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2095.150891] env[67424]: DEBUG nova.compute.manager [req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Received event network-vif-plugged-cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2095.151148] env[67424]: DEBUG oslo_concurrency.lockutils [req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] Acquiring lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.151306] env[67424]: DEBUG oslo_concurrency.lockutils [req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] Lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.151447] env[67424]: DEBUG oslo_concurrency.lockutils [req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] Lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.151608] env[67424]: DEBUG nova.compute.manager
[req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] No waiting events found dispatching network-vif-plugged-cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2095.151773] env[67424]: WARNING nova.compute.manager [req-192a45da-0b9c-4616-ac2d-4a87294645d4 req-f3cf9b92-cf87-4cc2-bda8-42e667583edb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Received unexpected event network-vif-plugged-cdba006c-43e4-4442-998b-f3ced5113721 for instance with vm_state building and task_state spawning. [ 2095.227820] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Successfully updated port: cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2095.240214] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2095.240358] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2095.240505] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2095.280205] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2095.628340] env[67424]: DEBUG nova.network.neutron [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Updating instance_info_cache with network_info: [{"id": "cdba006c-43e4-4442-998b-f3ced5113721", "address": "fa:16:3e:09:49:5e", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdba006c-43", "ovs_interfaceid": "cdba006c-43e4-4442-998b-f3ced5113721", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2095.640512] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Releasing lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.640778] env[67424]: DEBUG nova.compute.manager [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Instance network_info: |[{"id": "cdba006c-43e4-4442-998b-f3ced5113721", "address": "fa:16:3e:09:49:5e", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdba006c-43", "ovs_interfaceid": "cdba006c-43e4-4442-998b-f3ced5113721", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2095.641247] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:49:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '233536d0-6913-4879-8442-42dcf1d4ecbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdba006c-43e4-4442-998b-f3ced5113721', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2095.648919] env[67424]: DEBUG oslo.service.loopingcall [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2095.649430] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2095.649721] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca75642c-2862-4d9c-bcdd-62368e49a866 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.670600] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2095.670600] env[67424]: value = "task-3200093" [ 2095.670600] env[67424]: _type = "Task" [ 2095.670600] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.679096] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200093, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.888529] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200093, 'name': CreateVM_Task, 'duration_secs': 0.283828} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.888961] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2096.889404] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.889573] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.889885] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2096.890315] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92274bee-bb01-4647-819b-3de11aa44fe9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.895256] env[67424]: DEBUG oslo_vmware.api [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for the task: (returnval){ [ 2096.895256] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b1891f-9616-ed54-abbb-b9d0c13c93aa" [ 2096.895256] env[67424]: _type = "Task" [ 2096.895256] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.902763] env[67424]: DEBUG oslo_vmware.api [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52b1891f-9616-ed54-abbb-b9d0c13c93aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.174339] env[67424]: DEBUG nova.compute.manager [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Received event network-changed-cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2097.174394] env[67424]: DEBUG nova.compute.manager [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Refreshing instance network info cache due to event network-changed-cdba006c-43e4-4442-998b-f3ced5113721. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2097.174610] env[67424]: DEBUG oslo_concurrency.lockutils [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] Acquiring lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.174756] env[67424]: DEBUG oslo_concurrency.lockutils [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] Acquired lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.174914] env[67424]: DEBUG nova.network.neutron [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Refreshing network info cache for port cdba006c-43e4-4442-998b-f3ced5113721 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2097.405810] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.406082] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2097.406444] env[67424]: DEBUG oslo_concurrency.lockutils [None req-0df68880-a5f4-4caa-bdf4-1fafe0ad1e9d tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.411814] env[67424]: DEBUG nova.network.neutron [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Updated VIF entry in instance network info cache for port cdba006c-43e4-4442-998b-f3ced5113721. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2097.412201] env[67424]: DEBUG nova.network.neutron [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Updating instance_info_cache with network_info: [{"id": "cdba006c-43e4-4442-998b-f3ced5113721", "address": "fa:16:3e:09:49:5e", "network": {"id": "b78ca6e1-efbd-4327-a676-fee9c0461fb1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-166183820-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37696549d9e343acb36d6e89f75713d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdba006c-43", "ovs_interfaceid": "cdba006c-43e4-4442-998b-f3ced5113721", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.421314] env[67424]: DEBUG oslo_concurrency.lockutils [req-46edd8e6-9db4-419a-922a-46b3ca4155c3 req-c8951189-0c72-4961-9a8f-b11833811cfb service nova] Releasing lock "refresh_cache-4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.391707] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.392062] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2116.388488] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2122.398952] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.383478] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.387088] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.387242] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2127.387897] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.388296] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.388343] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.388478] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2127.399335] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2128.398494] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.398817] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.410610] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.410884] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.411103] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.411492] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources 
for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2128.412489] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e95a8ae-369d-4f14-ae84-e7f6602a4856 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.421039] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55745ce-66ce-4b76-8db0-3c00665eeaed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.435235] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebba052-704d-4911-8b2c-bb3c01bfb172 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.441153] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be869234-9786-4389-8873-cbc9081fafe9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.468504] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180989MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2128.468682] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.468915] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.551522] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.551682] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.551808] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.551930] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552124] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552230] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552344] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552455] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552567] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.552745] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2128.552876] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2128.653910] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29fce4c-faed-4430-a64e-ec93c33766f5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.661211] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cb9076-a8cb-4748-b01f-cab734ed353a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.691196] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb14d0a-707f-4907-8ecf-d4d0fb5471a2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.697796] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649c5f14-d3a3-44f5-b0e3-e1046753be6d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.710290] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.718044] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2128.732333] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2128.732500] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.721827] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.389020] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.389020] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2131.389020] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2131.408042] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408213] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408341] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408462] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408580] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408696] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408812] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.408924] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.409052] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2131.409173] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2132.436063] env[67424]: WARNING oslo_vmware.rw_handles [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2132.436063] env[67424]: ERROR oslo_vmware.rw_handles [ 2132.436769] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2132.438718] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2132.439009] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Copying Virtual Disk [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/592d0622-12a5-44b8-a6cb-fcfa3ce7a16b/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk 
{{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2132.439614] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e33baf25-9020-49d7-ae50-bba7ec0774b3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.448314] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2132.448314] env[67424]: value = "task-3200094" [ 2132.448314] env[67424]: _type = "Task" [ 2132.448314] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.456231] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.958384] env[67424]: DEBUG oslo_vmware.exceptions [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2132.958668] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.959261] env[67424]: ERROR nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2132.959261] env[67424]: Faults: ['InvalidArgument'] [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Traceback (most recent call last): [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] yield resources [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self.driver.spawn(context, instance, image_meta, [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2132.959261] 
env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self._fetch_image_if_missing(context, vi) [ 2132.959261] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] image_cache(vi, tmp_image_ds_loc) [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] vm_util.copy_virtual_disk( [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] session._wait_for_task(vmdk_copy_task) [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return self.wait_for_task(task_ref) [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return evt.wait() [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] result = hub.switch() [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2132.959698] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return self.greenlet.switch() [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self.f(*self.args, **self.kw) [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] raise exceptions.translate_fault(task_info.error) [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2132.960158] env[67424]: ERROR nova.compute.manager 
[instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Faults: ['InvalidArgument'] [ 2132.960158] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] [ 2132.960158] env[67424]: INFO nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Terminating instance [ 2132.961172] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.961378] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2132.961622] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a71139b-54d8-4277-8f3f-3e03716e2976 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.963879] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2132.964082] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2132.964819] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6dfd56-2ba0-494f-80c4-e89488e627bd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.971731] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2132.971950] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc5d91c9-fa7e-4271-8f57-978659fddfe2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.974179] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2132.974356] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d 
tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2132.975289] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02f2c47f-2521-44d7-9423-d21c98fdb553 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.979970] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2132.979970] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521c0d2d-ca96-f57f-e3cf-2dab6a267606" [ 2132.979970] env[67424]: _type = "Task" [ 2132.979970] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.987097] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521c0d2d-ca96-f57f-e3cf-2dab6a267606, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.044758] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2133.044945] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2133.045114] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleting the datastore file [datastore2] a909e0f2-5717-469f-83f2-4b07f03e2ff6 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2133.045394] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04f06c6a-1bf7-408f-920d-9bf7cb7854c8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.051206] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2133.051206] env[67424]: value = "task-3200096" [ 2133.051206] env[67424]: _type = "Task" [ 2133.051206] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.058667] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.490408] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2133.490779] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2133.490966] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-549d2b27-7d85-4508-bd36-42373ef72625 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.501900] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2133.502172] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Fetch image to [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2133.502404] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2133.503163] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9658259d-8a15-4a7d-ac20-39f2fdd21910 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.510449] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9044a12-5389-4a45-b69a-745ec1cb22e8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.519036] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8664945-2ae1-479c-a9a6-b9ac82ab78b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.551116] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b40825-13e8-41c8-adbd-ce4217ca9ce2 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.563066] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-da96c161-f9b5-4aa5-9d7e-55858575b0d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.563791] env[67424]: DEBUG oslo_vmware.api [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077798} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.564054] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2133.564238] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2133.564422] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2133.564626] env[67424]: INFO nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Took 0.60 seconds to destroy the instance on the hypervisor. 
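The destroy sequence above follows oslo.vmware's standard task pattern: a *_Task invocation returns a task reference, and wait_for_task() polls it (the "Waiting for the task ... to complete" and "progress is 0%" records) until the task reaches success or raises a translated fault, which is exactly how the InvalidArgument/fileType failure surfaced earlier in this section. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a hypothetical datastore path (none of these values come from this log):

    from oslo_vmware import api

    # Placeholder endpoint/credentials; api_retry_count and
    # task_poll_interval control the retry and polling cadence.
    session = api.VMwareAPISession(
        'vc.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vim = session.vim
    # invoke_api() issues the SOAP call and returns the task moref.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', vim.service_content.fileManager,
        name='[datastore2] some/dir/file.vmdk',  # hypothetical path
        datacenter=None)
    # wait_for_task() polls task.info, logging progress, and either
    # returns the task info on success or raises a VimFaultException
    # translated from the task error.
    task_info = session.wait_for_task(task)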
[ 2133.566770] env[67424]: DEBUG nova.compute.claims [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2133.566942] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.567176] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.586173] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2133.639410] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2133.699661] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2133.699797] env[67424]: DEBUG oslo_vmware.rw_handles [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2133.786188] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4ea2bf-8c55-42ff-9bff-14dd5a5a4844 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.794161] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d9936c-2095-40dd-b07e-45fa5b660df5 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.824391] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f916d437-d78f-4376-b6bd-d2b1d8003c50 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.831558] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339f905d-98ea-4ead-ba0f-31ace36e92ce {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.844452] env[67424]: DEBUG nova.compute.provider_tree [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2133.853477] env[67424]: DEBUG nova.scheduler.client.report [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2133.866671] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.867187] env[67424]: ERROR nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.867187] env[67424]: Faults: ['InvalidArgument'] [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Traceback (most recent call last): [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2133.867187] env[67424]: 
ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self.driver.spawn(context, instance, image_meta, [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self._fetch_image_if_missing(context, vi) [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] image_cache(vi, tmp_image_ds_loc) [ 2133.867187] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] vm_util.copy_virtual_disk( [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] session._wait_for_task(vmdk_copy_task) [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return self.wait_for_task(task_ref) [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return evt.wait() [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] result = hub.switch() [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] return self.greenlet.switch() [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2133.867562] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] self.f(*self.args, **self.kw) [ 2133.868100] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2133.868100] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] raise exceptions.translate_fault(task_info.error) [ 2133.868100] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.868100] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Faults: ['InvalidArgument'] [ 2133.868100] env[67424]: ERROR nova.compute.manager [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] [ 2133.868100] env[67424]: DEBUG nova.compute.utils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2133.869228] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Build of instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 was re-scheduled: A specified parameter was not correct: fileType [ 2133.869228] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2133.869591] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2133.869762] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2133.869929] env[67424]: DEBUG nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2133.870103] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2134.204214] env[67424]: DEBUG nova.network.neutron [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.217109] env[67424]: INFO nova.compute.manager [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Took 0.35 seconds to deallocate network for instance. [ 2134.307722] env[67424]: INFO nova.scheduler.client.report [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted allocations for instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 [ 2134.331680] env[67424]: DEBUG oslo_concurrency.lockutils [None req-50fa98fb-c524-4e9b-a0a0-1271ffb8660e tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 632.738s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.331680] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 436.410s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.331947] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.332028] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.332242] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.334587] env[67424]: INFO nova.compute.manager [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Terminating instance [ 2134.336314] env[67424]: DEBUG nova.compute.manager [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2134.336537] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2134.337050] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f1cefff-cb0b-4f8c-88d9-6f1f7aa893c6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.346089] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d732f055-859b-49da-8ae8-77237e882c87 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.376958] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a909e0f2-5717-469f-83f2-4b07f03e2ff6 could not be found. [ 2134.377227] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2134.377460] env[67424]: INFO nova.compute.manager [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2134.377731] env[67424]: DEBUG oslo.service.loopingcall [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.377988] env[67424]: DEBUG nova.compute.manager [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2134.378082] env[67424]: DEBUG nova.network.neutron [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2134.401984] env[67424]: DEBUG nova.network.neutron [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.411693] env[67424]: INFO nova.compute.manager [-] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] Took 0.03 seconds to deallocate network for instance. [ 2134.509168] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d5b2a5d2-5cb2-4e90-b99b-5f233de6c0ef tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.177s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.510528] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 287.764s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.510682] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: a909e0f2-5717-469f-83f2-4b07f03e2ff6] During sync_power_state the instance has a pending task (deleting). Skip.
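Every Acquiring/acquired/"released" triplet in these records, with its waited/held timings, is emitted by the inner wrapper in oslo_concurrency/lockutils.py cited at the end of each such line. A minimal sketch of the two usual ways such records are produced, reusing the "compute_resources" lock name from this log but otherwise illustrative:

    from oslo_concurrency import lockutils

    # Decorator form: the body runs with the named in-process semaphore
    # held, and lockutils logs the acquire/release plus wait/hold times.
    @lockutils.synchronized('compute_resources')
    def update_tracker():
        pass  # hypothetical critical section

    # Context-manager form, equivalent for ad-hoc critical sections.
    with lockutils.lock('compute_resources'):
        pass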
[ 2134.510881] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "a909e0f2-5717-469f-83f2-4b07f03e2ff6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.855463] env[67424]: WARNING oslo_vmware.rw_handles [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2182.855463] env[67424]: ERROR oslo_vmware.rw_handles [ 2182.856118] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2182.857771] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2182.858014] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Copying Virtual Disk [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/84913ca7-c0e6-4bec-a2de-9333005363e4/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2182.858312] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-794c0a88-821d-47b4-947f-38b111d158f8 {{(pid=67424) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.866415] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2182.866415] env[67424]: value = "task-3200097" [ 2182.866415] env[67424]: _type = "Task" [ 2182.866415] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.874083] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.377529] env[67424]: DEBUG oslo_vmware.exceptions [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2183.377886] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.378422] env[67424]: ERROR nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2183.378422] env[67424]: Faults: ['InvalidArgument'] [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Traceback (most recent call last): [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] yield resources [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self.driver.spawn(context, instance, image_meta, [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2183.378422] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self._fetch_image_if_missing(context, vi) [ 2183.378422] env[67424]: ERROR 
nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] image_cache(vi, tmp_image_ds_loc) [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] vm_util.copy_virtual_disk( [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] session._wait_for_task(vmdk_copy_task) [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return self.wait_for_task(task_ref) [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return evt.wait() [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] result = hub.switch() [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2183.378817] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return self.greenlet.switch() [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self.f(*self.args, **self.kw) [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] raise exceptions.translate_fault(task_info.error) [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Faults: ['InvalidArgument'] [ 2183.379228] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] [ 2183.379228] env[67424]: INFO nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Terminating instance [ 2183.380454] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.380679] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2183.380950] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab3421a5-d5e9-42c4-b348-1bddf1954d30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.384737] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2183.384979] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2183.385705] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb08a9a2-d6a6-4b7b-ab2c-0f021a73a7ae {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.390194] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2183.390367] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2183.391394] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef1c6c59-928a-42a7-9199-3b3bd9ca6f06 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.395361] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2183.395826] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a823354e-3525-4c15-8a20-2f2b64664ecc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.398037] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 2183.398037] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dc4a9b-6d70-4d2e-4b1e-38663f8a6c71" [ 2183.398037] env[67424]: _type = "Task" [ 2183.398037] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.405341] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52dc4a9b-6d70-4d2e-4b1e-38663f8a6c71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.464873] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2183.465111] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2183.465302] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleting the datastore file [datastore2] bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2183.465572] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fd568e1-a386-4ad8-bb0f-7a9aba67f7d7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.473009] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2183.473009] env[67424]: value = "task-3200099" [ 2183.473009] env[67424]: _type = "Task" [ 2183.473009] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.480209] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.909425] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2183.909798] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2183.909857] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e7de0a5-36c4-45e1-8da4-334c44763083 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.921085] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2183.921269] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Fetch image to [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2183.921443] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2183.922155] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381fa1be-303e-416a-b077-16f7d7a50114 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.928154] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aef54a2-bf4a-4e03-acfa-04fa01a4f7ed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.937074] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0835a857-50a8-40d7-b392-07a335fd4843 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.967945] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-262c715e-4532-4d09-b9a7-93b59764ff1d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.977093] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7124efbc-7b2a-4561-89b0-c4c8b886fd74 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.983352] env[67424]: DEBUG oslo_vmware.api [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081175} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.983597] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2183.983781] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2183.983961] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2183.984208] env[67424]: INFO nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Took 0.60 seconds to destroy the instance on the hypervisor. 
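Every vSphere call in this stretch (UnregisterVM, DeleteDatastoreFile_Task, the CopyVirtualDisk_Task above) follows the same invoke-then-poll shape: start the server-side task, then block in oslo.vmware's wait_for_task, which polls the task and emits the "progress is 0%" / "completed successfully" lines (api.py:397/434/444). A rough sketch, assuming oslo.vmware's VMwareAPISession takes host, username, password, retry count and poll interval in that order; the host, credentials, path and dc_ref are placeholders, not values from this log:

    from oslo_vmware import api

    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=2, task_poll_interval=0.5)
    fm = session.vim.service_content.fileManager
    # Kick off the asynchronous datastore deletion, then poll it to
    # completion; a failed task raises a translated VIM fault instead.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', fm,
                              name='[datastore2] some-dir',
                              datacenter=dc_ref)  # dc_ref: Datacenter moref, assumed in scope
    session.wait_for_task(task)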
[ 2183.986281] env[67424]: DEBUG nova.compute.claims [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2183.986453] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.986665] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.998143] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2184.049269] env[67424]: DEBUG oslo_vmware.rw_handles [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2184.110314] env[67424]: DEBUG oslo_vmware.rw_handles [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2184.110550] env[67424]: DEBUG oslo_vmware.rw_handles [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2184.178388] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28ed198-0e1c-43c2-ad59-d17e70941f53 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.186306] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb11d5e0-c045-462d-bb06-91792cfaa6db {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.216530] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5ea160-537e-4698-aa8b-d9b1991259f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.223546] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec196c0c-0e53-4d13-9962-9232e421a69d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.236408] env[67424]: DEBUG nova.compute.provider_tree [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2184.247711] env[67424]: DEBUG nova.scheduler.client.report [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2184.261570] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.275s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.262151] env[67424]: ERROR nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.262151] env[67424]: Faults: ['InvalidArgument'] [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Traceback (most recent call last): [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: 
bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self.driver.spawn(context, instance, image_meta, [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self._fetch_image_if_missing(context, vi) [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] image_cache(vi, tmp_image_ds_loc) [ 2184.262151] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] vm_util.copy_virtual_disk( [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] session._wait_for_task(vmdk_copy_task) [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return self.wait_for_task(task_ref) [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return evt.wait() [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] result = hub.switch() [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] return self.greenlet.switch() [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2184.262600] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] self.f(*self.args, **self.kw) [ 2184.262969] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2184.262969] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] raise exceptions.translate_fault(task_info.error) [ 2184.262969] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.262969] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Faults: ['InvalidArgument'] [ 2184.262969] env[67424]: ERROR nova.compute.manager [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] [ 2184.262969] env[67424]: DEBUG nova.compute.utils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2184.264323] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Build of instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 was re-scheduled: A specified parameter was not correct: fileType [ 2184.264323] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2184.264723] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2184.264900] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2184.265084] env[67424]: DEBUG nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2184.265253] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2184.387871] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.576814] env[67424]: DEBUG nova.network.neutron [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.591339] env[67424]: INFO nova.compute.manager [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Took 0.33 seconds to deallocate network for instance. [ 2184.684056] env[67424]: INFO nova.scheduler.client.report [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted allocations for instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 [ 2184.704113] env[67424]: DEBUG oslo_concurrency.lockutils [None req-d14ea8c1-113c-4aab-8f79-2be17f67c58d tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 511.765s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.704461] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 337.958s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.704749] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] During sync_power_state the instance has a pending task (spawning). Skip.
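When a polled task fails, as CopyVirtualDisk_Task does above, oslo.vmware translates the task error into an exception that carries the raw VIM fault names, which is what surfaces as "VimFaultException: A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" before the build is re-scheduled. A minimal handling sketch, assuming a session and a copy task are already in scope; cleanup_and_reschedule is a hypothetical stand-in for the abort-claim/re-schedule path logged above:

    from oslo_vmware import exceptions as vexc

    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list holds the fault names, e.g. ['InvalidArgument']
        if 'InvalidArgument' in e.fault_list:
            cleanup_and_reschedule()
        raise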
[ 2184.705794] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.706353] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 315.790s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.706628] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.706940] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.707218] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2184.711755] env[67424]: INFO nova.compute.manager [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Terminating instance [ 2184.713751] env[67424]: DEBUG nova.compute.manager [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Start destroying the instance on the hypervisor.
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2184.714038] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2184.714386] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d23b70c7-7a06-42c2-b67b-81614fd58d16 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.725393] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6f0757-fb6d-4a83-b4a0-9e2936feb82f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.752479] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb5764fe-791a-45d2-ba8c-fbdd2aed1e61 could not be found. [ 2184.752679] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2184.752857] env[67424]: INFO nova.compute.manager [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2184.753135] env[67424]: DEBUG oslo.service.loopingcall [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2184.753354] env[67424]: DEBUG nova.compute.manager [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2184.753452] env[67424]: DEBUG nova.network.neutron [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2184.774826] env[67424]: DEBUG nova.network.neutron [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2184.782340] env[67424]: INFO nova.compute.manager [-] [instance: bb5764fe-791a-45d2-ba8c-fbdd2aed1e61] Took 0.03 seconds to deallocate network for instance.
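The "Waiting for function ... _deallocate_network_with_retries to return" line shows oslo.service's looping-call primitive: the caller wraps a retryable step in a FixedIntervalLoopingCall and blocks on it until the wrapped function signals completion. A self-contained sketch of the mechanism; the three-attempt counter is illustrative, not Nova's actual retry policy:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        if attempts['n'] < 3:     # pretend the first two attempts fail
            return                # a plain return -> retried next interval
        raise loopingcall.LoopingCallDone()  # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()  # the caller blocks here, as logged above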
[ 2184.865787] env[67424]: DEBUG oslo_concurrency.lockutils [None req-39a82e93-f51b-48b3-9dd5-690b128807ae tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "bb5764fe-791a-45d2-ba8c-fbdd2aed1e61" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.159s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.387829] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.388236] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2187.388919] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.383627] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.387264] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.398687] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.399049] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.399167] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.399246] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2188.400354] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbda7ca-4f71-4c76-9486-90031e4c1d94 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.408984]
env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af9bfba-8876-42a9-ab3e-9e31e696dd92 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.422747] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31d494a-16ab-4e5e-9c54-e37eeed07aac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.429049] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1326409-4777-4381-8493-4766f68f095a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.457783] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181003MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2188.457945] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.458164] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.518417] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.518594] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.518724] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.518847] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.518967] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.519098] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.519218] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2188.519400] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2188.519537] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2188.605769] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc40b336-7a08-433b-ab6d-4ae6f3516251 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.613627] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca258ac4-1187-4604-a5df-96c60bf6e850 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.643772] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cedc08-e920-4d68-9b24-820248fc481c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.650885] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7a3a90-acf0-404f-af94-e37c197e112e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.663588] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2188.671644] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider 
b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2188.685204] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2188.685383] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.227s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.686059] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.686391] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.387685] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.388085] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2191.388085] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2191.405044] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405208] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405336] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405464] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405584] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405704] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405822] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2191.405941] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2191.406451] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.402926] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2232.869966] env[67424]: WARNING oslo_vmware.rw_handles [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2232.869966] 
env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2232.869966] env[67424]: ERROR oslo_vmware.rw_handles [ 2232.870716] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2232.872562] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2232.872801] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Copying Virtual Disk [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/7e2b8ba5-cef7-4a2b-b832-8ccc25e0b1f9/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2232.873108] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edc3686e-b251-487d-acde-ed9c479d0c4e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.880622] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 2232.880622] env[67424]: value = "task-3200100" [ 2232.880622] env[67424]: _type = "Task" [ 2232.880622] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.887959] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.391271] env[67424]: DEBUG oslo_vmware.exceptions [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2233.391546] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2233.392108] env[67424]: ERROR nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2233.392108] env[67424]: Faults: ['InvalidArgument'] [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Traceback (most recent call last): [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] yield resources [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self.driver.spawn(context, instance, image_meta, [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self._fetch_image_if_missing(context, vi) [ 2233.392108] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] image_cache(vi, tmp_image_ds_loc) [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] vm_util.copy_virtual_disk( [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] session._wait_for_task(vmdk_copy_task) [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return self.wait_for_task(task_ref) [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return evt.wait() [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] result = hub.switch() [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2233.392780] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return self.greenlet.switch() [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self.f(*self.args, **self.kw) [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] raise exceptions.translate_fault(task_info.error) [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Faults: ['InvalidArgument'] [ 2233.393276] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] [ 2233.393276] env[67424]: INFO nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Terminating instance [ 2233.393997] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2233.394228] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2233.394497] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7b9c980-10f2-4d05-9103-c5485bed6811 
{{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.396567] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2233.396763] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2233.397463] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe5f8c0-eea8-4ec6-b125-feab8b639833 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.405462] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2233.405655] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90f6053f-3ca1-4ab9-80dd-8565bebdbe9c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.407906] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2233.408187] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2233.409231] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87737104-fc99-4330-b393-3ea2c648cd12 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.413612] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for the task: (returnval){ [ 2233.413612] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52093ac9-7355-5f11-b52b-4ccb4ad8dda1" [ 2233.413612] env[67424]: _type = "Task" [ 2233.413612] env[67424]: } to complete. 
{{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.420595] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52093ac9-7355-5f11-b52b-4ccb4ad8dda1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.491223] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2233.491466] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2233.491621] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleting the datastore file [datastore2] 08d3abb5-1041-4dd2-a0e3-af33e7c2194f {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2233.491884] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e7ab92a-12ea-4263-87f5-61257eca35ec {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.498015] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 2233.498015] env[67424]: value = "task-3200102" [ 2233.498015] env[67424]: _type = "Task" [ 2233.498015] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.505336] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200102, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.924022] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2233.924306] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Creating directory with path [datastore2] vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2233.924569] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba69d261-338c-4387-bc4f-b87415e9c874 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.003402] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Created directory with path [datastore2] vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2234.003645] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Fetch image to [datastore2] vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2234.003791] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2234.004955] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd65681-bcb8-47bf-8054-bc4db1ecac80 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.010225] env[67424]: DEBUG oslo_vmware.api [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': task-3200102, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063399} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.010756] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2234.010940] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2234.011129] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2234.011304] env[67424]: INFO nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2234.014967] env[67424]: DEBUG nova.compute.claims [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2234.015160] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.015373] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.018582] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53061a22-cdba-4dc6-a0eb-119ed34e0675 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.027895] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f767852b-0c0c-48f6-a780-3c0ad97c948f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.057949] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c01b0e-87d3-4faa-a63a-8514f4e85708 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.067809] env[67424]: DEBUG oslo_vmware.service [-] 
Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3c7cbf5a-b2b1-43ae-8d67-4ab7c70bff51 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.088783] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2234.150721] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2234.210951] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2234.211193] env[67424]: DEBUG oslo_vmware.rw_handles [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2234.220631] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1223b01-b203-429c-ae9b-ceaf84aecf03 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.229749] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099503bf-6230-49f1-be9d-6dcc8d7a4c6e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.258498] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9c805c-876a-43ae-9a02-68fb62e61e30 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.264914] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997df5b0-670f-4b57-b648-b553c30621ff {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.277392] env[67424]: DEBUG nova.compute.provider_tree [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2234.287289] env[67424]: DEBUG nova.scheduler.client.report [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2234.301294] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.286s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.301811] env[67424]: ERROR nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2234.301811] env[67424]: Faults: ['InvalidArgument'] [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Traceback (most recent call last): [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2234.301811] 
env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self.driver.spawn(context, instance, image_meta, [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self._fetch_image_if_missing(context, vi) [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] image_cache(vi, tmp_image_ds_loc) [ 2234.301811] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] vm_util.copy_virtual_disk( [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] session._wait_for_task(vmdk_copy_task) [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return self.wait_for_task(task_ref) [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return evt.wait() [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] result = hub.switch() [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] return self.greenlet.switch() [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2234.302337] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] self.f(*self.args, **self.kw) [ 2234.302864] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2234.302864] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] raise exceptions.translate_fault(task_info.error) [ 2234.302864] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2234.302864] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Faults: ['InvalidArgument'] [ 2234.302864] env[67424]: ERROR nova.compute.manager [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] [ 2234.302864] env[67424]: DEBUG nova.compute.utils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2234.303910] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Build of instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f was re-scheduled: A specified parameter was not correct: fileType [ 2234.303910] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2234.304295] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2234.304472] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2234.304673] env[67424]: DEBUG nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2234.304842] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2234.608401] env[67424]: DEBUG nova.network.neutron [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2234.619789] env[67424]: INFO nova.compute.manager [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Took 0.31 seconds to deallocate network for instance. [ 2234.710425] env[67424]: INFO nova.scheduler.client.report [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Deleted allocations for instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f [ 2234.737401] env[67424]: DEBUG oslo_concurrency.lockutils [None req-aa97d537-21bc-4aee-8d99-e2b1b458b704 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 552.877s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.737685] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 387.991s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.737878] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2234.738061] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.738580] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 356.510s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.738803] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.739017] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.739215] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.741497] env[67424]: INFO nova.compute.manager [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Terminating instance [ 2234.743478] env[67424]: DEBUG nova.compute.manager [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2234.743684] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2234.744240] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee2db698-7a45-4b35-9c0f-1590b6a8d83f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.753727] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6abffe9-4388-4bed-8c34-5fce6e166f2b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.783521] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 08d3abb5-1041-4dd2-a0e3-af33e7c2194f could not be found. [ 2234.783750] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2234.783898] env[67424]: INFO nova.compute.manager [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2234.784192] env[67424]: DEBUG oslo.service.loopingcall [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2234.784422] env[67424]: DEBUG nova.compute.manager [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2234.784512] env[67424]: DEBUG nova.network.neutron [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2234.814372] env[67424]: DEBUG nova.network.neutron [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2234.824143] env[67424]: INFO nova.compute.manager [-] [instance: 08d3abb5-1041-4dd2-a0e3-af33e7c2194f] Took 0.04 seconds to deallocate network for instance. 
[ 2234.920543] env[67424]: DEBUG oslo_concurrency.lockutils [None req-a92d95c6-7516-4f61-a2d0-53e7f8a1b02e tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Lock "08d3abb5-1041-4dd2-a0e3-af33e7c2194f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.387551] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.387968] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.387968] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2248.388381] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2249.383604] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.390628] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.390892] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.403494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.403494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.403494] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2250.403796] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2250.404708] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2615929-6fd1-4f72-ab57-37e16931434e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.413703] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce867c1-ac08-41d1-a496-0fe8f1ed62cf {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.428067] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df44b453-0443-47d0-8b1c-aa6726c6ccda {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.434994] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8408fb2e-a57e-4ba1-9622-5099b4ec91d3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.468230] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180992MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2250.468400] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.468671] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.536831] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537012] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537153] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537273] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537396] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537514] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2250.537690] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2250.538402] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2250.646067] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0c130f-bad8-4f84-a1b1-624680e1fc04 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.654259] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad12da81-9973-4262-b080-9c1fb8161d62 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.684437] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1808323-7758-4d71-afca-a8f26c2d26cd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.691996] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ba5277-4ea8-4fe6-af0e-7720a6b06fa9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.705213] env[67424]: DEBUG nova.compute.provider_tree 
[None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2250.714775] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2250.729267] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2250.729462] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.261s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.726961] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.903952] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "0ad7bf34-a280-4a03-9398-974f0716fa88" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.904104] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Lock "0ad7bf34-a280-4a03-9398-974f0716fa88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.917903] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Starting instance...
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2251.970932] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.971222] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.972827] env[67424]: INFO nova.compute.claims [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2252.131287] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36ade9f-9e2c-4eb3-98f0-5577cd8b75b6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.139256] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab6946f-5661-4c8d-9976-a93228c43a29 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.168864] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba744225-bdb5-4caa-9b3f-6738e5945f8b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.175856] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343c65d2-06ff-4822-9237-a8a5acfaab40 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.189056] env[67424]: DEBUG nova.compute.provider_tree [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2252.198050] env[67424]: DEBUG nova.scheduler.client.report [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2252.211910] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.212441] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2252.248363] env[67424]: DEBUG nova.compute.utils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2252.249655] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2252.249817] env[67424]: DEBUG nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2252.263395] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2252.317629] env[67424]: DEBUG nova.policy [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3db9f71694bd4793ae864c9a55247961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '267735d9526147c9b5fc6cd41806abab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 2252.328885] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2252.358284] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2252.358534] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2252.358693] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2252.358877] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2252.359037] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2252.359208] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2252.359453] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2252.359619] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2252.359782] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2252.359945] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2252.360186] env[67424]: DEBUG nova.virt.hardware [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2252.361071] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98649fd3-0cf9-430f-a14e-920d00bafc1c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.369358] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c3e075-874b-4323-8146-a61bc147319a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.387216] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.387380] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2252.387539] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2252.404695] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.404904] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405036] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405168] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405294] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405419] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405537] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2252.405656] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2252.406147] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.656174] env[67424]: DEBUG nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Successfully created port: 9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2253.288798] env[67424]: DEBUG nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Successfully updated port: 9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2253.302372] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2253.302372] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquired lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2253.302372] env[67424]: DEBUG 
nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2253.333780] env[67424]: DEBUG nova.compute.manager [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Received event network-vif-plugged-9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2253.333780] env[67424]: DEBUG oslo_concurrency.lockutils [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] Acquiring lock "0ad7bf34-a280-4a03-9398-974f0716fa88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2253.333780] env[67424]: DEBUG oslo_concurrency.lockutils [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] Lock "0ad7bf34-a280-4a03-9398-974f0716fa88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.333780] env[67424]: DEBUG oslo_concurrency.lockutils [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] Lock "0ad7bf34-a280-4a03-9398-974f0716fa88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2253.334094] env[67424]: DEBUG nova.compute.manager [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] No waiting events found dispatching network-vif-plugged-9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2253.334094] env[67424]: WARNING nova.compute.manager [req-b003ac88-da72-4983-8e8f-8c590cf2e00b req-2c5412a0-2a6c-4fff-9108-f3d1d60da771 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Received unexpected event network-vif-plugged-9751b36d-b2d2-416e-a03b-76989dfea16a for instance with vm_state building and task_state spawning. [ 2253.345654] env[67424]: DEBUG nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Instance cache missing network info.
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2253.507624] env[67424]: DEBUG nova.network.neutron [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Updating instance_info_cache with network_info: [{"id": "9751b36d-b2d2-416e-a03b-76989dfea16a", "address": "fa:16:3e:f4:17:e1", "network": {"id": "25cfc1e7-f0b2-46fa-ab3c-1c5cc06ae791", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1975847852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "267735d9526147c9b5fc6cd41806abab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9751b36d-b2", "ovs_interfaceid": "9751b36d-b2d2-416e-a03b-76989dfea16a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2253.519597] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Releasing lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2253.519874] env[67424]: DEBUG nova.compute.manager [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Instance network_info: |[{"id": "9751b36d-b2d2-416e-a03b-76989dfea16a", "address": "fa:16:3e:f4:17:e1", "network": {"id": "25cfc1e7-f0b2-46fa-ab3c-1c5cc06ae791", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1975847852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "267735d9526147c9b5fc6cd41806abab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9751b36d-b2", "ovs_interfaceid": "9751b36d-b2d2-416e-a03b-76989dfea16a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2253.520321] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:17:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1d468f87-964a-4fb6-bab3-b83f6f2646b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9751b36d-b2d2-416e-a03b-76989dfea16a', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2253.528063] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Creating folder: Project (267735d9526147c9b5fc6cd41806abab). Parent ref: group-v639843. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2253.528541] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56ce2322-d3bc-4c08-802f-98c87f84af2f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.540075] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Created folder: Project (267735d9526147c9b5fc6cd41806abab) in parent group-v639843. [ 2253.540260] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Creating folder: Instances. Parent ref: group-v639948. {{(pid=67424) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2253.540484] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-509fa98d-fedd-4a8a-b29f-cfd90067e0ef {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.548957] env[67424]: INFO nova.virt.vmwareapi.vm_util [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Created folder: Instances in parent group-v639948. [ 2253.549192] env[67424]: DEBUG oslo.service.loopingcall [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2253.549365] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2253.549550] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10dec913-35c9-40b2-b2af-58d499952486 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.566958] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2253.566958] env[67424]: value = "task-3200105" [ 2253.566958] env[67424]: _type = "Task" [ 2253.566958] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.573751] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200105, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.077367] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200105, 'name': CreateVM_Task, 'duration_secs': 0.307002} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2254.077572] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2254.078208] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2254.078371] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2254.078688] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2254.078929] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-468c0b08-8699-4adf-b5fa-826f23d316d8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.083128] env[67424]: DEBUG oslo_vmware.api [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Waiting for the task: (returnval){ [ 2254.083128] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52799bff-3071-97a4-eae9-3c19c32ecf6a" [ 2254.083128] env[67424]: _type = "Task" 
[ 2254.083128] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2254.091353] env[67424]: DEBUG oslo_vmware.api [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52799bff-3071-97a4-eae9-3c19c32ecf6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.592986] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2254.593279] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2254.593551] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f413325a-1754-4dc0-8387-9f976505a10a tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2255.359488] env[67424]: DEBUG nova.compute.manager [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Received event network-changed-9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2255.359770] env[67424]: DEBUG nova.compute.manager [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Refreshing instance network info cache due to event network-changed-9751b36d-b2d2-416e-a03b-76989dfea16a. 
{{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2255.359969] env[67424]: DEBUG oslo_concurrency.lockutils [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] Acquiring lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2255.360072] env[67424]: DEBUG oslo_concurrency.lockutils [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] Acquired lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2255.360214] env[67424]: DEBUG nova.network.neutron [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Refreshing network info cache for port 9751b36d-b2d2-416e-a03b-76989dfea16a {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2255.610721] env[67424]: DEBUG nova.network.neutron [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Updated VIF entry in instance network info cache for port 9751b36d-b2d2-416e-a03b-76989dfea16a. {{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2255.611131] env[67424]: DEBUG nova.network.neutron [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Updating instance_info_cache with network_info: [{"id": "9751b36d-b2d2-416e-a03b-76989dfea16a", "address": "fa:16:3e:f4:17:e1", "network": {"id": "25cfc1e7-f0b2-46fa-ab3c-1c5cc06ae791", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1975847852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "267735d9526147c9b5fc6cd41806abab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1d468f87-964a-4fb6-bab3-b83f6f2646b5", "external-id": "nsx-vlan-transportzone-131", "segmentation_id": 131, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9751b36d-b2", "ovs_interfaceid": "9751b36d-b2d2-416e-a03b-76989dfea16a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2255.620504] env[67424]: DEBUG oslo_concurrency.lockutils [req-23dd7ab2-3157-4fc9-8a89-26eb719b4bdb req-ead152a2-f070-4aa5-94b9-4149158b1d99 service nova] Releasing lock "refresh_cache-0ad7bf34-a280-4a03-9398-974f0716fa88" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.563459] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring 
lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.830321] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b03efdad-6d7e-42d4-8f48-3393b59b3fc8 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquiring lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2281.375181] env[67424]: WARNING oslo_vmware.rw_handles [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2281.375181] env[67424]: ERROR oslo_vmware.rw_handles [ 2281.375911] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2281.377396] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2281.377640] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Copying Virtual Disk [datastore2] vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] 
vmware_temp/2c5fd47d-b9a0-4c96-aac3-19dfdf9a5378/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2281.377928] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0f450ca-f5ce-4a42-83b3-31e652e36881 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.386250] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for the task: (returnval){ [ 2281.386250] env[67424]: value = "task-3200106" [ 2281.386250] env[67424]: _type = "Task" [ 2281.386250] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.395162] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': task-3200106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.896793] env[67424]: DEBUG oslo_vmware.exceptions [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2281.897081] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2281.897643] env[67424]: ERROR nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2281.897643] env[67424]: Faults: ['InvalidArgument'] [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Traceback (most recent call last): [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] yield resources [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self.driver.spawn(context, instance, image_meta, [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 
8e178dab-b6bb-4e29-bac9-64ab2b925762] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self._fetch_image_if_missing(context, vi) [ 2281.897643] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] image_cache(vi, tmp_image_ds_loc) [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] vm_util.copy_virtual_disk( [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] session._wait_for_task(vmdk_copy_task) [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return self.wait_for_task(task_ref) [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return evt.wait() [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] result = hub.switch() [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2281.898042] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return self.greenlet.switch() [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self.f(*self.args, **self.kw) [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] raise exceptions.translate_fault(task_info.error) [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Faults: ['InvalidArgument'] [ 2281.898617] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] [ 2281.898617] env[67424]: INFO nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Terminating instance [ 2281.900027] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2281.900027] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2281.900027] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1656f831-ea84-4856-94fe-1b88b1e7ff3e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.902220] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2281.902439] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2281.903152] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf060a85-21e3-4ae2-a382-425b68e0a145 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.909582] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2281.909784] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-150ae43d-c535-430f-a10a-1729746a932c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.911807] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2281.911977] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2281.912897] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39bcb2e0-28e5-4f7b-8d51-2d5f42126f5c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.917664] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for the task: (returnval){ [ 2281.917664] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c43c89-dd2e-5bc3-a1d1-a3bc7e656bb5" [ 2281.917664] env[67424]: _type = "Task" [ 2281.917664] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.926135] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52c43c89-dd2e-5bc3-a1d1-a3bc7e656bb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.976335] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2281.976697] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2281.976957] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Deleting the datastore file [datastore2] 8e178dab-b6bb-4e29-bac9-64ab2b925762 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2281.977320] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e212081-210b-49d0-8fa5-6761c8fea07f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.983085] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for the task: (returnval){ [ 2281.983085] env[67424]: value = "task-3200108" [ 2281.983085] env[67424]: _type = "Task" [ 2281.983085] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.990550] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': task-3200108, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.428280] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2282.428654] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Creating directory with path [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2282.428843] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b32cdc0a-2169-4f9d-acc1-557518b74838 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.439791] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Created directory with path [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2282.439980] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Fetch image to [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2282.440169] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2282.440927] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a78bb7b-3f3a-440e-b4a8-7c889d4dba15 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.447353] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e1a5d6-58e0-4cec-9e86-2293022c6486 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.456482] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ccbe8-1971-4410-9545-665ea2d2826d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.488425] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e198e44-acc3-43bd-b1c8-b4684512df3e {{(pid=67424) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.494972] env[67424]: DEBUG oslo_vmware.api [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Task: {'id': task-3200108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081881} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.496347] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2282.496600] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2282.496825] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2282.497012] env[67424]: INFO nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Took 0.59 seconds to destroy the instance on the hypervisor. 
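
The "Waiting for the task ... progress is 0% ... completed successfully" pairs above are the oslo.vmware task-polling protocol: every mutating vCenter call (UnregisterVM, DeleteDatastoreFile_Task, SearchDatastore_Task) returns a Task managed object, and the caller blocks on its TaskInfo until it reaches a terminal state. Below is a minimal sketch of that loop under placeholder names (session.call, TaskFailed); it illustrates the control flow visible in these entries, not the real oslo_vmware.api implementation:

    import time

    POLL_INTERVAL = 0.5  # seconds between TaskInfo reads (illustrative value)

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in the 'error' state."""

    def wait_for_task(session, task_ref):
        # Poll the task's TaskInfo until it leaves the queued/running
        # states, mirroring the DEBUG "Task: {...} progress is N%." entries.
        while True:
            info = session.call('get_object_property', task_ref, 'info')
            if info.state in ('queued', 'running'):
                print('Task: %s progress is %s%%.' % (info.key, info.progress or 0))
                time.sleep(POLL_INTERVAL)
                continue
            if info.state == 'success':
                return info.result
            # 'error' state: surface the fault, the way _poll_task does via
            # exceptions.translate_fault(task_info.error).
            raise TaskFailed(getattr(info.error, 'localizedMessage', info.error))
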
[ 2282.498771] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-27f20f5f-bb33-4272-baa9-150456eb3b32 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.500603] env[67424]: DEBUG nova.compute.claims [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2282.500772] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.500981] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.521612] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2282.578102] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2282.639972] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2282.640177] env[67424]: DEBUG oslo_vmware.rw_handles [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2282.690705] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc45200f-57dc-46a6-a3d8-865b32e9c726 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.698092] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406d7c0e-2d48-4158-a966-9846ccbdad7a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.726937] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d5208d-f5e0-4822-bea3-6cc494a5ff18 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.733549] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f963a1f-fcae-494d-9f3f-89daef507430 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.746222] env[67424]: DEBUG nova.compute.provider_tree [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2282.754812] env[67424]: DEBUG nova.scheduler.client.report [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2282.769065] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.268s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.769582] env[67424]: ERROR nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.769582] env[67424]: Faults: ['InvalidArgument'] [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Traceback (most recent call last): [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2282.769582] env[67424]: ERROR 
nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self.driver.spawn(context, instance, image_meta, [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self._fetch_image_if_missing(context, vi) [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] image_cache(vi, tmp_image_ds_loc) [ 2282.769582] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] vm_util.copy_virtual_disk( [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] session._wait_for_task(vmdk_copy_task) [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return self.wait_for_task(task_ref) [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return evt.wait() [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] result = hub.switch() [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] return self.greenlet.switch() [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2282.769993] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] self.f(*self.args, **self.kw) [ 2282.770434] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2282.770434] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] raise exceptions.translate_fault(task_info.error) [ 2282.770434] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.770434] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Faults: ['InvalidArgument'] [ 2282.770434] env[67424]: ERROR nova.compute.manager [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] [ 2282.770434] env[67424]: DEBUG nova.compute.utils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2282.772048] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Build of instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 was re-scheduled: A specified parameter was not correct: fileType [ 2282.772048] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2282.772459] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2282.772655] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2282.772834] env[67424]: DEBUG nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2282.772998] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2283.071857] env[67424]: DEBUG nova.network.neutron [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.086240] env[67424]: INFO nova.compute.manager [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Took 0.31 seconds to deallocate network for instance. [ 2283.183970] env[67424]: INFO nova.scheduler.client.report [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Deleted allocations for instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 [ 2283.205617] env[67424]: DEBUG oslo_concurrency.lockutils [None req-2b5a2c0a-c2d9-4ee2-9e44-554e8db50f96 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 582.751s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.205880] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 436.459s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.206101] env[67424]: INFO nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] During sync_power_state the instance has a pending task (spawning). Skip. 
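
The traceback above fails inside the image-cache step: the sparse disk downloaded to vmware_temp/.../tmp-sparse.vmdk must be copied into devstack-image-cache_base via CopyVirtualDisk_Task, and it is that copy that vCenter rejects with InvalidArgument on fileType, which in turn aborts the resource claim and re-schedules the build. A minimal sketch of the step, assuming a placeholder `session` object rather than the real VMwareAPISession API:

    def cache_sparse_image(session, image_id, tmp_sparse_path,
                           datastore='datastore2'):
        """Copy the freshly downloaded sparse disk into the shared image cache."""
        cached = ('[{ds}] devstack-image-cache_base/{img}/{img}.vmdk'
                  .format(ds=datastore, img=image_id))
        # This CopyVirtualDisk_Task is the call vCenter rejects here with
        # "A specified parameter was not correct: fileType" / InvalidArgument.
        task = session.call('CopyVirtualDisk_Task',
                            source_name=tmp_sparse_path, dest_name=cached)
        # wait_for_task re-raises the task fault as VimFaultException; the
        # compute manager then aborts the claim, deallocates networking,
        # and re-schedules the build, as the surrounding entries show.
        session.wait_for_task(task)
        return cached
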
[ 2283.206291] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.206871] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 386.083s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.207104] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.207316] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.207483] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.209657] env[67424]: INFO nova.compute.manager [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Terminating instance [ 2283.211573] env[67424]: DEBUG nova.compute.manager [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2283.211788] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2283.212076] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-899f313c-ea28-4189-9073-9e204647ef0e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.223010] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eaebb3-ab48-4255-a416-3d3b65eb0710 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.249289] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e178dab-b6bb-4e29-bac9-64ab2b925762 could not be found. [ 2283.249557] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2283.249672] env[67424]: INFO nova.compute.manager [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2283.249913] env[67424]: DEBUG oslo.service.loopingcall [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2283.250409] env[67424]: DEBUG nova.compute.manager [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2283.250515] env[67424]: DEBUG nova.network.neutron [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2283.276278] env[67424]: DEBUG nova.network.neutron [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.284959] env[67424]: INFO nova.compute.manager [-] [instance: 8e178dab-b6bb-4e29-bac9-64ab2b925762] Took 0.03 seconds to deallocate network for instance. 
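
The terminate sequence above is idempotent: SearchIndex.FindAllByUuid finds no VM because the failed build already tore it down, so the driver logs InstanceNotFound only as a warning, reports the instance destroyed, and the manager still deallocates networking. A sketch of that shape, with hypothetical helper names and a duck-typed `session` standing in for the driver's session object:

    class InstanceNotFound(Exception):
        pass

    def find_vm_by_uuid(session, uuid):
        refs = session.call('FindAllByUuid', uuid=uuid, vm_search=True)
        if not refs:
            raise InstanceNotFound(uuid)
        return refs[0]

    def destroy_instance(session, network_api, uuid):
        try:
            vm_ref = find_vm_by_uuid(session, uuid)
            session.call('UnregisterVM', vm=vm_ref)
            session.call('DeleteDatastoreFile_Task', path=uuid)
        except InstanceNotFound:
            # Already gone on the backend; treat the delete as done rather
            # than failing ("Instance does not exist on backend" above).
            pass
        # Runs in both branches so Neutron ports are not leaked.
        network_api.deallocate_for_instance(uuid)
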
[ 2283.372992] env[67424]: DEBUG oslo_concurrency.lockutils [None req-9eb874f7-c161-4222-84a5-463848fe437e tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Lock "8e178dab-b6bb-4e29-bac9-64ab2b925762" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.270483] env[67424]: DEBUG oslo_concurrency.lockutils [None req-28cc70de-2d88-4dbf-9e23-e7b9cf1edfd1 tempest-DeleteServersTestJSON-1876160165 tempest-DeleteServersTestJSON-1876160165-project-member] Acquiring lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2304.388528] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.388703] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.389189] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2309.388605] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.388064] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.400173] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.400512] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.400575] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.400698] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources 
for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2310.401834] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f703a37-9a8f-4fe0-93b1-8ad3a4f291ca {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.411881] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c886416-57f0-4901-8b04-0ec79584f429 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.425679] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224503f7-c2e3-4fca-8957-bcb3c7a831b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.431686] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85547c2b-abd3-4f8f-b539-ffa960c5afdc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.459681] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180977MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2310.459804] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.459990] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.519771] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 461f92ee-b076-4cb7-8170-66cddb898b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.519931] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.520069] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.520189] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.520306] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.520449] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0ad7bf34-a280-4a03-9398-974f0716fa88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.520630] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2310.520766] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2310.535605] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing inventories for resource provider b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2310.547827] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating ProviderTree inventory for provider b21acede-6243-4c82-934a-a3956380220f from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2310.547997] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Updating inventory in ProviderTree for provider b21acede-6243-4c82-934a-a3956380220f with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2310.558078] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing aggregate associations for resource provider b21acede-6243-4c82-934a-a3956380220f, aggregates: None {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2310.574729] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Refreshing trait associations for resource provider b21acede-6243-4c82-934a-a3956380220f, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=67424) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2310.645448] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01316074-8070-4bde-badc-07687af83a73 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.652915] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2c8129-3bb2-4dcc-a9d3-641c54b45c98 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.682882] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee72211-3645-4935-a170-2d9bd3711983 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.689486] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3316e25e-19f3-4e0a-aee3-c021e7090768 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.702051] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2310.710715] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2310.723519] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2310.723703] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2311.718974] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.387383] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.387585] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2312.387710] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2312.403313] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.403485] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.403640] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.403767] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.403887] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.404015] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2312.404143] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2312.404635] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.404812] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2314.388257] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.384235] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.390339] env[67424]: WARNING oslo_vmware.rw_handles [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2331.390339] env[67424]: ERROR oslo_vmware.rw_handles [ 2331.391196] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2331.393018] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 
461f92ee-b076-4cb7-8170-66cddb898b99] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2331.393336] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Copying Virtual Disk [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/2124019f-7f40-491f-a80c-10d57e6011b5/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2331.393699] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5ddfb31-0947-49cf-bb8d-2ee3fdcc3d7f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.400936] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for the task: (returnval){ [ 2331.400936] env[67424]: value = "task-3200109" [ 2331.400936] env[67424]: _type = "Task" [ 2331.400936] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.409297] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Task: {'id': task-3200109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.911156] env[67424]: DEBUG oslo_vmware.exceptions [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2331.911441] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.911982] env[67424]: ERROR nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2331.911982] env[67424]: Faults: ['InvalidArgument'] [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Traceback (most recent call last): [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] yield resources [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self.driver.spawn(context, instance, image_meta, [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self._fetch_image_if_missing(context, vi) [ 2331.911982] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] image_cache(vi, tmp_image_ds_loc) [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] vm_util.copy_virtual_disk( [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] session._wait_for_task(vmdk_copy_task) [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return self.wait_for_task(task_ref) [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return evt.wait() [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] result = hub.switch() [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2331.912387] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return self.greenlet.switch() [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self.f(*self.args, **self.kw) [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] raise exceptions.translate_fault(task_info.error) [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Faults: ['InvalidArgument'] [ 2331.912818] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] [ 2331.912818] env[67424]: INFO nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Terminating instance [ 2331.913955] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2331.914669] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2331.914669] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e654568f-9f5d-4855-8972-a105f957f47a {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.916512] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2331.916736] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2331.917441] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e95641-4389-4cf4-937e-80fb5cd4eee8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.924133] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2331.924344] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-397c09cc-a9f1-4cbb-87e0-749e7eb9678f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.926450] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2331.926639] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2331.927559] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0547044-ee0a-4a22-8b65-4d92df8763ed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.932395] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2331.932395] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52a98173-953a-7d70-ea42-cd7e5cfc59d5" [ 2331.932395] env[67424]: _type = "Task" [ 2331.932395] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.940162] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52a98173-953a-7d70-ea42-cd7e5cfc59d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.999438] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2331.999684] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2331.999870] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Deleting the datastore file [datastore2] 461f92ee-b076-4cb7-8170-66cddb898b99 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2332.000168] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd43a741-c317-4d22-951f-0e6d36e8a666 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.005982] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for the task: (returnval){ [ 2332.005982] env[67424]: value = "task-3200111" [ 2332.005982] env[67424]: _type = "Task" [ 2332.005982] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.014427] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Task: {'id': task-3200111, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.442185] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2332.442650] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating directory with path [datastore2] vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.442712] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f56ab3fc-2b9c-4db8-848f-2346d6c2a1ba {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.453388] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Created directory with path [datastore2] vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.453596] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Fetch image to [datastore2] vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2332.453787] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2332.454498] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d18cd1-abca-450f-a4fe-839bbda9f936 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.460710] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb2962b-23ca-403c-8fca-c3ca5f1a9995 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.469424] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e89ac7-1631-4aeb-95b9-cf7ead359b77 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.499431] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-33d25ee8-3a93-4aa9-b474-542f1143a865 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.504624] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2e7efe03-b84a-4217-87ae-a61691880877 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.513448] env[67424]: DEBUG oslo_vmware.api [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Task: {'id': task-3200111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074803} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.513685] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2332.513864] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2332.514046] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2332.514223] env[67424]: INFO nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2332.516491] env[67424]: DEBUG nova.compute.claims [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2332.516744] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.517035] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.526716] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2332.626771] env[67424]: DEBUG oslo_vmware.rw_handles [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2332.687369] env[67424]: DEBUG oslo_vmware.rw_handles [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2332.687547] env[67424]: DEBUG oslo_vmware.rw_handles [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2332.695441] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95099511-3dd0-4bf4-988b-8e99a39a1502 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.703143] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda834cf-71da-440c-87a6-8abaa1e95a7b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.733426] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40e5337-0fa2-4a8f-a249-a8b5c3f3fcac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.739924] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d9c9aa-7007-4055-8d8c-57723977d3d1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.752324] env[67424]: DEBUG nova.compute.provider_tree [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2332.760902] env[67424]: DEBUG nova.scheduler.client.report [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2332.775719] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.259s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2332.776238] env[67424]: ERROR nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.776238] env[67424]: Faults: ['InvalidArgument'] [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Traceback (most recent call last): [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2332.776238] env[67424]: ERROR nova.compute.manager 
[instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self.driver.spawn(context, instance, image_meta, [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self._fetch_image_if_missing(context, vi) [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] image_cache(vi, tmp_image_ds_loc) [ 2332.776238] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] vm_util.copy_virtual_disk( [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] session._wait_for_task(vmdk_copy_task) [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return self.wait_for_task(task_ref) [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return evt.wait() [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] result = hub.switch() [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] return self.greenlet.switch() [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2332.776667] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] self.f(*self.args, **self.kw) [ 2332.777098] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2332.777098] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] raise exceptions.translate_fault(task_info.error) [ 2332.777098] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.777098] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Faults: ['InvalidArgument'] [ 2332.777098] env[67424]: ERROR nova.compute.manager [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] [ 2332.777098] env[67424]: DEBUG nova.compute.utils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2332.778361] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Build of instance 461f92ee-b076-4cb7-8170-66cddb898b99 was re-scheduled: A specified parameter was not correct: fileType [ 2332.778361] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2332.778726] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2332.778899] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2332.779080] env[67424]: DEBUG nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2332.779244] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2333.078194] env[67424]: DEBUG nova.network.neutron [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2333.088314] env[67424]: INFO nova.compute.manager [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Took 0.31 seconds to deallocate network for instance. [ 2333.211201] env[67424]: INFO nova.scheduler.client.report [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Deleted allocations for instance 461f92ee-b076-4cb7-8170-66cddb898b99 [ 2333.234568] env[67424]: DEBUG oslo_concurrency.lockutils [None req-8eefd43f-731e-4bb3-a4c1-17229be37e50 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 478.473s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.234868] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 282.548s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.235095] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Acquiring lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.235652] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.235652] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.237626] env[67424]: INFO nova.compute.manager [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Terminating instance [ 2333.239446] env[67424]: DEBUG nova.compute.manager [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2333.239663] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2333.240163] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dca96e7f-f753-4380-b818-dcb86baca2ed {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.249203] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbab0e3-8115-444e-9f93-e0d840afc5fc {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.277909] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 461f92ee-b076-4cb7-8170-66cddb898b99 could not be found. [ 2333.278138] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2333.278345] env[67424]: INFO nova.compute.manager [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2333.278560] env[67424]: DEBUG oslo.service.loopingcall [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2333.279174] env[67424]: DEBUG nova.compute.manager [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2333.279174] env[67424]: DEBUG nova.network.neutron [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2333.321189] env[67424]: DEBUG nova.network.neutron [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2333.332129] env[67424]: INFO nova.compute.manager [-] [instance: 461f92ee-b076-4cb7-8170-66cddb898b99] Took 0.05 seconds to deallocate network for instance. [ 2333.415750] env[67424]: DEBUG oslo_concurrency.lockutils [None req-f7e53d38-8a69-46b2-95db-5dbd5c469062 tempest-ServerGroupTestJSON-1203219876 tempest-ServerGroupTestJSON-1203219876-project-member] Lock "461f92ee-b076-4cb7-8170-66cddb898b99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2364.389888] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.387655] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.388041] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2369.389652] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.383579] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.387215] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.404673] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.404904] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2371.405114] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2371.405289] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2371.406782] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab03c81-64fa-480c-a158-8319fb37e445 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.415425] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea92075-89ca-4d8f-915d-2d91c9421596 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.429513] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bde7ba-bf6f-408a-9ef5-47feeb76a7b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.435679] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7329a5-c7a1-433c-a2e4-12c84ae786a7 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.464300] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2371.464448] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.464640] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2371.521929] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 909d3788-23da-446f-9c47-46df54003e1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.522095] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.522223] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.522345] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.522462] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0ad7bf34-a280-4a03-9398-974f0716fa88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.522633] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2371.522789] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2371.586682] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fd10f7-c6b0-421c-a6e0-0e2c5fc0db8f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.594522] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ab19e0-8b43-40ec-b5a2-43c0bd2cfc84 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.622948] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f1734e-dae3-4bd1-944c-5866945a6980 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.629701] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad67978-c090-4f4f-90cf-d720d9fa13e6 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.642328] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2371.650147] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2371.663176] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2371.663356] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.199s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.664609] env[67424]: DEBUG oslo_service.periodic_task [None 
req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.387610] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.387811] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2374.387946] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2374.402933] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2374.403147] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2374.403216] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2374.403345] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2374.403468] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2374.403635] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
{{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2374.404089] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.404281] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.179322] env[67424]: WARNING oslo_vmware.rw_handles [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2379.179322] env[67424]: ERROR oslo_vmware.rw_handles [ 2379.179892] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2379.181734] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2379.181983] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Copying Virtual Disk [datastore2] vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] 
vmware_temp/7491dedc-b145-4717-8020-08c53145a6e8/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2379.182278] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de2c4a6d-ae1c-44d2-9fbd-b1559debac4f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.189426] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2379.189426] env[67424]: value = "task-3200112" [ 2379.189426] env[67424]: _type = "Task" [ 2379.189426] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.198523] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.700129] env[67424]: DEBUG oslo_vmware.exceptions [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Fault InvalidArgument not matched. {{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2379.700460] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2379.701132] env[67424]: ERROR nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.701132] env[67424]: Faults: ['InvalidArgument'] [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] Traceback (most recent call last): [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] yield resources [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self.driver.spawn(context, instance, image_meta, [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2379.701132] env[67424]: ERROR nova.compute.manager 
[instance: 909d3788-23da-446f-9c47-46df54003e1c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self._fetch_image_if_missing(context, vi) [ 2379.701132] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] image_cache(vi, tmp_image_ds_loc) [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] vm_util.copy_virtual_disk( [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] session._wait_for_task(vmdk_copy_task) [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return self.wait_for_task(task_ref) [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return evt.wait() [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] result = hub.switch() [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2379.701457] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return self.greenlet.switch() [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self.f(*self.args, **self.kw) [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] raise exceptions.translate_fault(task_info.error) [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] Faults: ['InvalidArgument'] [ 2379.701799] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] [ 2379.701799] env[67424]: INFO nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Terminating instance [ 2379.703241] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.703476] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.703740] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9beb743-ebf1-4798-8814-fa28e85860ca {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.706207] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Start destroying the instance on the hypervisor. 
{{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2379.706436] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2379.707204] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369ab100-d1e2-4db8-bd06-5b2f2b594657 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.714139] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2379.714367] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1a146c9-e59a-4137-8dc3-7ffda5c257a9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.716580] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.716790] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2379.717791] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c280f65d-ff34-44b2-b6bc-0cbbb5ccd238 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.722621] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2379.722621] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52147e99-b965-d40c-292a-e83606777fff" [ 2379.722621] env[67424]: _type = "Task" [ 2379.722621] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.732568] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52147e99-b965-d40c-292a-e83606777fff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.786748] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2379.786955] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2379.787151] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleting the datastore file [datastore2] 909d3788-23da-446f-9c47-46df54003e1c {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2379.787412] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05c0510f-ceec-42ac-9971-8a3bf20a1d59 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.793653] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for the task: (returnval){ [ 2379.793653] env[67424]: value = "task-3200114" [ 2379.793653] env[67424]: _type = "Task" [ 2379.793653] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.800797] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.232833] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2380.233107] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating directory with path [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2380.233354] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94379d96-b06e-4860-b26d-18030a830c78 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.244668] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Created directory with path [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2380.244864] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Fetch image to [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2380.245097] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2380.245864] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa6c0bd-73c7-494a-a5d6-8259c463c515 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.252481] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d3089a-3aff-472f-be3f-9120742c1d7b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.261243] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e13a5e5-d16f-44b5-b371-4ef2bb6a90ac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.291407] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad668556-2598-446e-82f7-61433d6af155 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.299666] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d0cd4f98-e0ab-46dc-8b39-c114b2384f4f {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.303990] env[67424]: DEBUG oslo_vmware.api [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Task: {'id': task-3200114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063811} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2380.304521] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2380.304712] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2380.304883] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2380.305070] env[67424]: INFO nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2380.307163] env[67424]: DEBUG nova.compute.claims [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2380.307328] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.307535] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.324349] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2380.378557] env[67424]: DEBUG oslo_vmware.rw_handles [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2380.439824] env[67424]: DEBUG oslo_vmware.rw_handles [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2380.440085] env[67424]: DEBUG oslo_vmware.rw_handles [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2380.469962] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a6b295-3706-45c3-b6f7-bc3ff649f2cd {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.476788] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6b67f7-bb01-4aac-8135-fd51774533f9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.506053] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e6115b-967d-45c4-b707-fe704e8d7b71 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.513083] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36aa4dc-946a-4bf4-abf6-890985a1f8a3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.525802] env[67424]: DEBUG nova.compute.provider_tree [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2380.534187] env[67424]: DEBUG nova.scheduler.client.report [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2380.547091] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.239s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.547615] env[67424]: ERROR nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.547615] env[67424]: Faults: ['InvalidArgument'] [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] Traceback (most recent call last): [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2380.547615] env[67424]: 
ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self.driver.spawn(context, instance, image_meta, [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self._fetch_image_if_missing(context, vi) [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] image_cache(vi, tmp_image_ds_loc) [ 2380.547615] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] vm_util.copy_virtual_disk( [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] session._wait_for_task(vmdk_copy_task) [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return self.wait_for_task(task_ref) [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return evt.wait() [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] result = hub.switch() [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] return self.greenlet.switch() [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2380.547921] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] self.f(*self.args, **self.kw) [ 2380.548243] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2380.548243] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] raise exceptions.translate_fault(task_info.error) [ 2380.548243] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.548243] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] Faults: ['InvalidArgument'] [ 2380.548243] env[67424]: ERROR nova.compute.manager [instance: 909d3788-23da-446f-9c47-46df54003e1c] [ 2380.548366] env[67424]: DEBUG nova.compute.utils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2380.549718] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Build of instance 909d3788-23da-446f-9c47-46df54003e1c was re-scheduled: A specified parameter was not correct: fileType [ 2380.549718] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2380.550098] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2380.550277] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2380.550449] env[67424]: DEBUG nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2380.550623] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.839085] env[67424]: DEBUG nova.network.neutron [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.856056] env[67424]: INFO nova.compute.manager [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Took 0.31 seconds to deallocate network for instance. [ 2380.949652] env[67424]: INFO nova.scheduler.client.report [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Deleted allocations for instance 909d3788-23da-446f-9c47-46df54003e1c [ 2380.970164] env[67424]: DEBUG oslo_concurrency.lockutils [None req-edc72b2d-6d43-4001-b9cd-a3196c6b177d tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 486.955s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.970431] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 291.231s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.970650] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Acquiring lock "909d3788-23da-446f-9c47-46df54003e1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.970864] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.971038] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.973063] env[67424]: INFO nova.compute.manager [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Terminating instance [ 2380.974819] env[67424]: DEBUG nova.compute.manager [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2380.975052] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2380.975534] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be03bc7f-255c-4501-a03c-13b6b0a85bb4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.986062] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a789f2-31c0-4764-8eac-9a4c9dd06671 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.011679] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 909d3788-23da-446f-9c47-46df54003e1c could not be found. [ 2381.011870] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2381.012089] env[67424]: INFO nova.compute.manager [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2381.012339] env[67424]: DEBUG oslo.service.loopingcall [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2381.012547] env[67424]: DEBUG nova.compute.manager [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2381.012640] env[67424]: DEBUG nova.network.neutron [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2381.034276] env[67424]: DEBUG nova.network.neutron [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2381.041994] env[67424]: INFO nova.compute.manager [-] [instance: 909d3788-23da-446f-9c47-46df54003e1c] Took 0.03 seconds to deallocate network for instance. [ 2381.125884] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b40e527b-3a17-46dc-a7f0-447e3080feb7 tempest-ServerDiskConfigTestJSON-95061694 tempest-ServerDiskConfigTestJSON-95061694-project-member] Lock "909d3788-23da-446f-9c47-46df54003e1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2416.387981] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.388335] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances with incomplete migration {{(pid=67424) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2421.389112] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2426.395525] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2426.422994] env[67424]: WARNING oslo_vmware.rw_handles [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles response.begin() [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in 
begin [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2426.422994] env[67424]: ERROR oslo_vmware.rw_handles [ 2426.423433] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Downloaded image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2426.425335] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Caching image {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2426.425570] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Copying Virtual Disk [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk to [datastore2] vmware_temp/d51059ad-9644-4201-99af-3c950e16cf78/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk {{(pid=67424) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2426.425857] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7c4f836-ebd0-4072-8a2d-abbbba4ac7b1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.433607] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2426.433607] env[67424]: value = "task-3200115" [ 2426.433607] env[67424]: _type = "Task" [ 2426.433607] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2426.442484] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2426.944753] env[67424]: DEBUG oslo_vmware.exceptions [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Fault InvalidArgument not matched. 
{{(pid=67424) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2426.945047] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2426.945624] env[67424]: ERROR nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2426.945624] env[67424]: Faults: ['InvalidArgument'] [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Traceback (most recent call last): [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] yield resources [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self.driver.spawn(context, instance, image_meta, [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self._fetch_image_if_missing(context, vi) [ 2426.945624] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] image_cache(vi, tmp_image_ds_loc) [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] vm_util.copy_virtual_disk( [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] session._wait_for_task(vmdk_copy_task) [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return self.wait_for_task(task_ref) [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return evt.wait() [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] result = hub.switch() [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2426.945921] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return self.greenlet.switch() [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self.f(*self.args, **self.kw) [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] raise exceptions.translate_fault(task_info.error) [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Faults: ['InvalidArgument'] [ 2426.946416] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] [ 2426.946416] env[67424]: INFO nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Terminating instance [ 2426.947635] env[67424]: DEBUG oslo_concurrency.lockutils [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2426.947845] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2426.948100] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f58afabf-c6ab-48a1-aa89-57da44874862 {{(pid=67424) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.950491] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2426.950683] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2426.951403] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7bbbe4-7c7c-4af6-aad4-68ec9246edaa {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.958288] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Unregistering the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2426.958505] env[67424]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95f049eb-8137-463e-aa08-62771cdbd83a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.960682] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2426.960858] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=67424) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2426.961817] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbc8ffb8-f509-419d-b409-04bb9edf0d22 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.966538] env[67424]: DEBUG oslo_vmware.api [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Waiting for the task: (returnval){ [ 2426.966538] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52ca8ee1-dcc8-0159-47a0-c33f969aa47b" [ 2426.966538] env[67424]: _type = "Task" [ 2426.966538] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2426.973252] env[67424]: DEBUG oslo_vmware.api [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]52ca8ee1-dcc8-0159-47a0-c33f969aa47b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.034739] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Unregistered the VM {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2427.034960] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Deleting contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2427.035140] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleting the datastore file [datastore2] 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2427.035407] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efaa9fac-bed2-4e86-ac2b-eb3e74f24c82 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.041907] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2427.041907] env[67424]: value = "task-3200117" [ 2427.041907] env[67424]: _type = "Task" [ 2427.041907] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2427.049865] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200117, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.476754] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Preparing fetch location {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2427.477065] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating directory with path [datastore2] vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2427.477248] env[67424]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7279071-b15a-4000-983b-b9fc51eea3eb {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.488556] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Created directory with path [datastore2] vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2427.488741] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Fetch image to [datastore2] vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2427.488908] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to [datastore2] vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk on the data store datastore2 {{(pid=67424) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2427.489629] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f9bbb5-e705-41b4-b18e-9f64c3d90360 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.496020] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aafe021-6b33-4012-bb8e-a33b7243a8ff {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.504785] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22a3d4b-3b91-40aa-91d5-e92af71ae02d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.534340] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-16afe8d4-e6c2-4baa-a365-fa823cac1f3b {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.539686] env[67424]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d5cf07fb-e93f-4d11-9f64-706b948974c1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.550825] env[67424]: DEBUG oslo_vmware.api [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': task-3200117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06593} completed successfully. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2427.551062] env[67424]: DEBUG nova.virt.vmwareapi.ds_util [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted the datastore file {{(pid=67424) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2427.551244] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Deleted contents of the VM from datastore datastore2 {{(pid=67424) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2427.551406] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2427.551568] env[67424]: INFO nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2427.553680] env[67424]: DEBUG nova.compute.claims [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Aborting claim: {{(pid=67424) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2427.553873] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2427.554169] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2427.559410] env[67424]: DEBUG nova.virt.vmwareapi.images [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Downloading image file data 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff to the data store datastore2 {{(pid=67424) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2427.614341] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=67424) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2427.677221] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Completed reading data from the image iterator. {{(pid=67424) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2427.677597] env[67424]: DEBUG oslo_vmware.rw_handles [None req-fa2ade67-39ed-4cfa-a404-b86c105c3a90 tempest-AttachInterfacesTestJSON-1868380497 tempest-AttachInterfacesTestJSON-1868380497-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91b1a188-58ab-4194-ba27-4abbd2de69cb/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=67424) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2427.750663] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c75c84b-3942-4da9-a91e-294ceea3640a {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.757696] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354ce684-fdcd-4e32-a520-7f5cdd7e950c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.787731] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656d840d-217b-4011-843d-f4ba9d4e81b8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.794597] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7cbe5b-0030-4657-91b7-747ad3740df3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.807193] env[67424]: DEBUG nova.compute.provider_tree [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2427.815266] env[67424]: DEBUG nova.scheduler.client.report [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2427.828349] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2427.828858] env[67424]: ERROR nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2427.828858] env[67424]: Faults: ['InvalidArgument'] [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Traceback (most recent call last): [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 
55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self.driver.spawn(context, instance, image_meta, [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self._fetch_image_if_missing(context, vi) [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] image_cache(vi, tmp_image_ds_loc) [ 2427.828858] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] vm_util.copy_virtual_disk( [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] session._wait_for_task(vmdk_copy_task) [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return self.wait_for_task(task_ref) [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return evt.wait() [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] result = hub.switch() [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] return self.greenlet.switch() [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2427.829144] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] self.f(*self.args, **self.kw) [ 2427.829481] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2427.829481] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] raise exceptions.translate_fault(task_info.error) [ 2427.829481] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2427.829481] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Faults: ['InvalidArgument'] [ 2427.829481] env[67424]: ERROR nova.compute.manager [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] [ 2427.829614] env[67424]: DEBUG nova.compute.utils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] VimFaultException {{(pid=67424) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2427.830878] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Build of instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 was re-scheduled: A specified parameter was not correct: fileType [ 2427.830878] env[67424]: Faults: ['InvalidArgument'] {{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2427.831266] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Unplugging VIFs for instance {{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2427.831441] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67424) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2427.831611] env[67424]: DEBUG nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2427.831775] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2428.180388] env[67424]: DEBUG nova.network.neutron [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2428.195230] env[67424]: INFO nova.compute.manager [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Took 0.36 seconds to deallocate network for instance. [ 2428.297395] env[67424]: INFO nova.scheduler.client.report [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Deleted allocations for instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 [ 2428.318076] env[67424]: DEBUG oslo_concurrency.lockutils [None req-08310d86-d147-4cc5-b4aa-2c3bf35bb6ba tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 362.651s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2428.318528] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 166.755s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2428.318761] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2428.319027] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2428.319179] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2428.321362] env[67424]: INFO nova.compute.manager [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Terminating instance [ 2428.323343] env[67424]: DEBUG nova.compute.manager [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Start destroying the instance on the hypervisor. {{(pid=67424) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2428.323475] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Destroying instance {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2428.324231] env[67424]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01d4bb44-f56a-43cb-bda8-1af85481de14 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.335351] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6117f0d-6faa-40bf-9718-738565e5e8b0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.360535] env[67424]: WARNING nova.virt.vmwareapi.vmops [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 55bf5d18-e16a-4fed-b11b-30ceb40c2d46 could not be found. [ 2428.360741] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Instance destroyed {{(pid=67424) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2428.360916] env[67424]: INFO nova.compute.manager [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2428.361196] env[67424]: DEBUG oslo.service.loopingcall [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2428.361415] env[67424]: DEBUG nova.compute.manager [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Deallocating network for instance {{(pid=67424) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2428.361509] env[67424]: DEBUG nova.network.neutron [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] deallocate_for_instance() {{(pid=67424) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2428.398592] env[67424]: DEBUG nova.network.neutron [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Updating instance_info_cache with network_info: [] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2428.407088] env[67424]: INFO nova.compute.manager [-] [instance: 55bf5d18-e16a-4fed-b11b-30ceb40c2d46] Took 0.05 seconds to deallocate network for instance. [ 2428.488527] env[67424]: DEBUG oslo_concurrency.lockutils [None req-b829a1ce-1e9d-4717-abc9-454829e5af98 tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "55bf5d18-e16a-4fed-b11b-30ceb40c2d46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2429.378965] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2429.379236] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2429.387555] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.387724] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.387868] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67424) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2429.389908] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Starting instance... 
{{(pid=67424) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2429.436856] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2429.437062] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2429.438456] env[67424]: INFO nova.compute.claims [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2429.534867] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b070cb-0b22-4964-85e1-31cb7ee8ce20 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.542205] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd32a9a-2f4c-449c-90b3-1c335d2963e3 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.572616] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672822a8-daf6-4364-99da-d98ca31fe5f4 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.578965] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de64df9b-c7f4-48ca-a618-63e15d6b4be0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.591197] env[67424]: DEBUG nova.compute.provider_tree [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2429.599329] env[67424]: DEBUG nova.scheduler.client.report [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2429.611914] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.175s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2429.612364] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Start building networks asynchronously for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2429.644521] env[67424]: DEBUG nova.compute.utils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Using /dev/sd instead of None {{(pid=67424) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2429.646036] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Allocating IP information in the background. {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2429.646271] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] allocate_for_instance() {{(pid=67424) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2429.655852] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Start building block device mappings for instance. {{(pid=67424) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2429.719058] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Start spawning the instance on the hypervisor. 
{{(pid=67424) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2429.744476] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:16:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:15:51Z,direct_url=,disk_format='vmdk',id=0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0b2360eb1420490db3cb04c00583f0da',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:15:51Z,virtual_size=,visibility=), allow threads: False {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2429.744726] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2429.744885] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image limits 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2429.745084] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Flavor pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2429.745240] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Image pref 0:0:0 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2429.745440] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67424) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2429.745671] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2429.745832] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2429.745998] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 
tempest-ServersTestJSON-1776744922-project-member] Got 1 possible topologies {{(pid=67424) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2429.746180] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2429.746354] env[67424]: DEBUG nova.virt.hardware [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67424) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2429.747235] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3051a59a-1df1-48e2-966e-878806e074a8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.757794] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6facd7a9-5863-4c76-ba0d-5b247d8fc6e1 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.880386] env[67424]: DEBUG nova.policy [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ac8098a2a904b4292a23bc38e8be219', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc66a2bf57d34e309f0f21a60c224076', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67424) authorize /opt/stack/nova/nova/policy.py:203}} [ 2430.178910] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Successfully created port: a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2430.746211] env[67424]: DEBUG nova.compute.manager [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Received event network-vif-plugged-a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2430.746452] env[67424]: DEBUG oslo_concurrency.lockutils [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] Acquiring lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.746641] env[67424]: DEBUG oslo_concurrency.lockutils [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] Lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.746809] env[67424]: DEBUG oslo_concurrency.lockutils [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] Lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.746974] env[67424]: DEBUG nova.compute.manager [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] No waiting events found dispatching network-vif-plugged-a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2430.747175] env[67424]: WARNING nova.compute.manager [req-92492fc9-bb3c-41d4-a263-19e8234bf65c req-9265eb06-0b76-4412-a1eb-8fa2d5a848d7 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Received unexpected event network-vif-plugged-a743cdde-2431-4355-8614-95e522b17715 for instance with vm_state building and task_state spawning. [ 2430.827138] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Successfully updated port: a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2430.839070] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2430.839247] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2430.839400] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Building network info cache for instance {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2430.882945] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Instance cache missing network info. 
{{(pid=67424) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2431.052678] env[67424]: DEBUG nova.network.neutron [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Updating instance_info_cache with network_info: [{"id": "a743cdde-2431-4355-8614-95e522b17715", "address": "fa:16:3e:20:4d:64", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa743cdde-24", "ovs_interfaceid": "a743cdde-2431-4355-8614-95e522b17715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2431.065274] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2431.065553] env[67424]: DEBUG nova.compute.manager [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Instance network_info: |[{"id": "a743cdde-2431-4355-8614-95e522b17715", "address": "fa:16:3e:20:4d:64", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa743cdde-24", "ovs_interfaceid": "a743cdde-2431-4355-8614-95e522b17715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67424) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2431.065951] 
env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:4d:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a743cdde-2431-4355-8614-95e522b17715', 'vif_model': 'vmxnet3'}] {{(pid=67424) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2431.074914] env[67424]: DEBUG oslo.service.loopingcall [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67424) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2431.075406] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Creating VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2431.076027] env[67424]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb407569-1a58-4c09-b372-492ff9e9f542 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.097113] env[67424]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2431.097113] env[67424]: value = "task-3200118" [ 2431.097113] env[67424]: _type = "Task" [ 2431.097113] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2431.105727] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200118, 'name': CreateVM_Task} progress is 0%. {{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2431.607103] env[67424]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200118, 'name': CreateVM_Task, 'duration_secs': 0.30647} completed successfully. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2431.607241] env[67424]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Created VM on the ESX host {{(pid=67424) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2431.607859] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2431.608042] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2431.608356] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2431.608601] env[67424]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-614d509f-cd56-404e-a89b-4c2e977cfd1d {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.612655] env[67424]: DEBUG oslo_vmware.api [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Waiting for the task: (returnval){ [ 2431.612655] env[67424]: value = "session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521b6651-ddce-eaa5-1c75-f5d4af3364cd" [ 2431.612655] env[67424]: _type = "Task" [ 2431.612655] env[67424]: } to complete. {{(pid=67424) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2431.620055] env[67424]: DEBUG oslo_vmware.api [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Task: {'id': session[528eaf4b-3d97-a6ca-ff19-c81c0183050b]521b6651-ddce-eaa5-1c75-f5d4af3364cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67424) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2432.123504] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2432.123826] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Processing image 0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff {{(pid=67424) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2432.123919] env[67424]: DEBUG oslo_concurrency.lockutils [None req-c475b9f5-a072-4baa-8e2a-17cada49edcb tempest-ServersTestJSON-1776744922 tempest-ServersTestJSON-1776744922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff/0bb1cc7b-ed02-4511-be0c-015a8a9fe1ff.vmdk" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2432.383834] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2432.387565] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2432.398814] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2432.399061] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2432.399245] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2432.399409] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67424) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2432.400495] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec0d69a-8cb8-442f-a7b8-9f8176ef7a08 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2432.409060] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dc2067-f359-46ce-9a4c-f04457a0e7a8 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.422443] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9ce918-6e54-4cca-a305-8f4adef25fee {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.428509] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906d34ce-5377-4c9d-b63f-806a3a1aa882 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.459108] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181004MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=67424) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2432.459350] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2432.459495] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2432.512863] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 35c3054d-517b-4ea4-acd5-7135c07e4e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2432.513022] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2432.513183] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance 0ad7bf34-a280-4a03-9398-974f0716fa88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2432.513310] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Instance e25bd18f-be81-4c95-9bfb-2c85cb07d620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67424) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2432.513487] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2432.513711] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=67424) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2432.573205] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1047efbf-f581-43a9-9a1c-17b600a8cf2c {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.580726] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be2d260-5e02-4217-b74b-e8e094e3087e {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.609707] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1232aa-e3c2-4b10-82f9-b401a9db3535 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.616386] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ec1f5f-a180-4446-9988-997d0bd25aac {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.629330] env[67424]: DEBUG nova.compute.provider_tree [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed in ProviderTree for provider: b21acede-6243-4c82-934a-a3956380220f {{(pid=67424) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2432.638782] env[67424]: DEBUG nova.scheduler.client.report [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Inventory has not changed for provider b21acede-6243-4c82-934a-a3956380220f based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67424) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2432.652121] env[67424]: DEBUG nova.compute.resource_tracker [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67424) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2432.652312] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.193s {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2432.773068] env[67424]: DEBUG nova.compute.manager 
[req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Received event network-changed-a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2432.773380] env[67424]: DEBUG nova.compute.manager [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Refreshing instance network info cache due to event network-changed-a743cdde-2431-4355-8614-95e522b17715. {{(pid=67424) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2432.773599] env[67424]: DEBUG oslo_concurrency.lockutils [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] Acquiring lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2432.773741] env[67424]: DEBUG oslo_concurrency.lockutils [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] Acquired lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2432.773901] env[67424]: DEBUG nova.network.neutron [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Refreshing network info cache for port a743cdde-2431-4355-8614-95e522b17715 {{(pid=67424) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2433.020959] env[67424]: DEBUG nova.network.neutron [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Updated VIF entry in instance network info cache for port a743cdde-2431-4355-8614-95e522b17715. 
{{(pid=67424) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2433.021554] env[67424]: DEBUG nova.network.neutron [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Updating instance_info_cache with network_info: [{"id": "a743cdde-2431-4355-8614-95e522b17715", "address": "fa:16:3e:20:4d:64", "network": {"id": "eecf956c-2115-4976-a7d6-ecdd8afcc7c7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1819846484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc66a2bf57d34e309f0f21a60c224076", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa743cdde-24", "ovs_interfaceid": "a743cdde-2431-4355-8614-95e522b17715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67424) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2433.032244] env[67424]: DEBUG oslo_concurrency.lockutils [req-634be811-a6c6-4eb0-b583-d80f755c6246 req-cf136e5d-fa3b-49e8-aabc-f70cabd92235 service nova] Releasing lock "refresh_cache-e25bd18f-be81-4c95-9bfb-2c85cb07d620" {{(pid=67424) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2433.387911] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2433.388167] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Cleaning up deleted instances {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2433.397241] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] There are 0 instances to clean {{(pid=67424) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2434.397301] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2434.397585] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2434.723519] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67424) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2434.724042] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){ [ 2434.724042] env[67424]: value = "domain-c8" [ 2434.724042] env[67424]: _type = "ClusterComputeResource" [ 2434.724042] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2434.725097] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f555553-dd76-4e6d-929e-c17b01a3c8d9 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.738180] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 4 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2435.419183] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2435.419452] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Starting heal instance info cache {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2435.419533] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Rebuilding the list of instances to heal {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2435.432753] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 35c3054d-517b-4ea4-acd5-7135c07e4e9a] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2435.432928] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2435.433049] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: 0ad7bf34-a280-4a03-9398-974f0716fa88] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2435.433225] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] [instance: e25bd18f-be81-4c95-9bfb-2c85cb07d620] Skipping network cache update for instance because it is Building. {{(pid=67424) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2435.433354] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Didn't find any instances for network info cache update. 
[ 2435.433817] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2438.399556] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2447.807493] env[67424]: DEBUG oslo_concurrency.lockutils [None req-99120e66-583f-41c1-8268-5351ce6eda8f tempest-ServerMetadataNegativeTestJSON-1238406607 tempest-ServerMetadataNegativeTestJSON-1238406607-project-member] Acquiring lock "0ad7bf34-a280-4a03-9398-974f0716fa88" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2464.702334] env[67424]: DEBUG oslo_service.periodic_task [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67424) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2464.718379] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Getting list of instances from cluster (obj){
[ 2464.718379] env[67424]: value = "domain-c8"
[ 2464.718379] env[67424]: _type = "ClusterComputeResource"
[ 2464.718379] env[67424]: } {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 2464.719649] env[67424]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5941b3-d18b-41a2-b2a5-6149cde554d0 {{(pid=67424) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2464.733376] env[67424]: DEBUG nova.virt.vmwareapi.vmops [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Got total of 4 instances {{(pid=67424) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 2464.733539] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 35c3054d-517b-4ea4-acd5-7135c07e4e9a {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}}
[ 2464.733737] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}}
[ 2464.733898] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid 0ad7bf34-a280-4a03-9398-974f0716fa88 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}}
[ 2464.734073] env[67424]: DEBUG nova.compute.manager [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Triggering sync for uuid e25bd18f-be81-4c95-9bfb-2c85cb07d620 {{(pid=67424) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}}
[ 2464.734513] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "35c3054d-517b-4ea4-acd5-7135c07e4e9a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
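The "Getting list of instances from cluster" / "Invoking PropertyCollector.RetrievePropertiesEx" pairs above are the driver paging managed objects out of vCenter through oslo.vmware. Below is a rough sketch of that call pattern, not Nova's actual vmops code: the host and credentials are placeholders, and it assumes oslo.vmware's vim_util.get_objects helper taking (vim, type, max_objects, properties).

```python
# Sketch of an oslo.vmware object retrieval, the call behind the
# RetrievePropertiesEx entries above (placeholder host/credentials).
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc1.example.com', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# One RetrievePropertiesEx round trip; result.objects holds up to
# max_objects ObjectContent records for the requested type.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100, ['runtime.powerState'])
print('Got total of %d instances' % len(result.objects))
```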
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.734633] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "4ccd7289-06aa-4d06-bbc4-fbb74e7abe2e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.734828] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "0ad7bf34-a280-4a03-9398-974f0716fa88" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2464.735031] env[67424]: DEBUG oslo_concurrency.lockutils [None req-59a8275f-d947-467a-9800-d3b35b0a0ea6 None None] Acquiring lock "e25bd18f-be81-4c95-9bfb-2c85cb07d620" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67424) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}